/* Expands front end tree to back end RTL for GCC
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
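
/* Illustration only (not a verbatim caller): a front end expands
   `if (cond) then-body' roughly as

	expand_start_cond (cond, 0);	-- emit the condition test
	... expand the then-clause ...
	expand_end_cond ();		-- emit the join label

   with the conditional jump emitted by the start-function and the
   label it targets by the end-function.  */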
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "langhooks.h"
#include "predict.h"
#include "optabs.h"
#include "target.h"

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
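
/* Illustration only: for the GNU C switch

	switch (i) { case 1: ...  case 3 ... 5: ...  default: ... }

   two case_nodes are created, one with low == high == 1 and one with
   low == 3 and high == 5; the default label is not a case_node but is
   recorded separately in struct nesting_case below.  */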
struct case_node GTY(())
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
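
/* For example, COST_TABLE (-1) expands to cost_table_[0], so the one
   out-of-range index -1 is usable even though TREE_INT_CST_LOW is
   unsigned.  */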
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
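
/* For instance, while expanding a binding contour inside a loop, the
   innermost `struct nesting' is reachable both from `nesting_stack'
   (via `all') and from `block_stack' (via `next'), while the enclosing
   loop is the head of `loop_stack'.  */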
struct nesting GTY(())
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  enum nesting_desc {
    COND_NESTING,
    LOOP_NESTING,
    BLOCK_NESTING,
    CASE_NESTING
  } desc;
  union nesting_u
    {
      /* For conds (if-then and if-then-else statements).  */
      struct nesting_cond
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} GTY ((tag ("COND_NESTING"))) cond;
      /* For loops.  */
      struct nesting_loop
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} GTY ((tag ("LOOP_NESTING"))) loop;
      /* For variable binding contours.  */
      struct nesting_block
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block, if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	} GTY ((tag ("BLOCK_NESTING"))) block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct nesting_case
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} GTY ((tag ("CASE_NESTING"))) case_stmt;
    } GTY ((desc ("%1.desc"))) data;
};
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() ggc_alloc (sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all; }			\
     while (this != target); } while (0)
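
/* Thus POPSTACK (loop_stack) pops every construct begun after the
   innermost pending loop, and then the loop itself, keeping each
   per-construct stack consistent along the way.  */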
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
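
/* Illustration only: in

	{ int v[n];  ...  goto lab; }
	lab: ;

   the goto leaves a contour whose stack level must be restored, but
   `lab' has not been seen when the jump is expanded, so a fixup is
   queued here and finished once `lab' is finally defined.  */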
struct goto_fixup GTY(())
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain GTY(())
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};

struct stmt_status GTY(())
{
  /* Chain of all pending binding contours.  */
  struct nesting * x_block_stack;

  /* If any new stacks are added here, add them to POPSTACKS too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting * x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting * x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting * x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting * x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting * x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Location of last line-number note, whether we actually
     emitted it or not.  */
  location_t x_emit_locus;

  struct goto_fixup *x_goto_fixup_chain;
};

#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define loop_stack (cfun->stmt->x_loop_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_locus (cfun->stmt->x_emit_locus)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)

/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;
static int n_occurrences (int, const char *);
static bool parse_input_constraint (const char **, int, int, int, int,
				    const char * const *, bool *, bool *);
static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
static void expand_goto_internal (tree, rtx, rtx);
static int expand_fixup (tree, rtx, rtx);
static rtx expand_nl_handler_label (rtx, rtx);
static void expand_nl_goto_receiver (void);
static void expand_nl_goto_receivers (struct nesting *);
static void fixup_gotos (struct nesting *, rtx, tree, rtx, int);
static bool check_operand_nalternatives (tree, tree);
static bool check_unique_operand_names (tree, tree);
static char *resolve_operand_name_1 (char *, tree, tree);
static void expand_null_return_1 (rtx);
static enum br_predictor return_prediction (rtx);
static void expand_value_return (rtx);
static int tail_recursion_args (tree, tree);
static void expand_cleanups (tree, int, int);
static void check_seenlabel (void);
static void do_jump_if_equal (rtx, rtx, rtx, int);
static int estimate_case_costs (case_node_ptr);
static bool same_case_target_p (rtx, rtx);
static void strip_default_case_nodes (case_node_ptr *, rtx);
static bool lshift_cheap_p (void);
static int case_bit_test_cmp (const void *, const void *);
static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
static void group_case_nodes (case_node_ptr);
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
static void emit_jump_if_reachable (rtx);
static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
static struct case_node *case_tree2list (case_node *, case_node *);
void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt_for_function (void)
{
  cfun->stmt = ggc_alloc_cleared (sizeof (struct stmt_status));
}

/* Record the current file and line.  Called from emit_line_note.  */

void
set_file_and_line_for_stmt (location_t location)
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    emit_locus = location;
}

/* Emit a no-op instruction.  */

void
emit_nop (void)
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}

/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (tree label)
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (!DECL_RTL_SET_P (label))
    SET_DECL_RTL (label, gen_label_rtx ());

  return DECL_RTL (label);
}

/* As above, but also put it on the forced-reference list of the
   function that contains it.  */
rtx
force_label_rtx (tree label)
{
  rtx ref = label_rtx (label);
  tree function = decl_function_context (label);
  struct function *p;

  if (!function)
    abort ();

  if (function != current_function_decl
      && function != inline_function_decl)
    p = find_function_data (function);
  else
    p = cfun;

  p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
						p->expr->x_forced_labels);
  return ref;
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (rtx label)
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (tree exp)
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (x) != Pmode)
    x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();

  if (! cfun->computed_goto_common_label)
    {
      cfun->computed_goto_common_reg = copy_to_mode_reg (Pmode, x);
      cfun->computed_goto_common_label = gen_label_rtx ();
      emit_label (cfun->computed_goto_common_label);

      do_pending_stack_adjust ();
      emit_indirect_jump (cfun->computed_goto_common_reg);

      current_function_has_computed_jump = 1;
    }
  else
    {
      emit_move_insn (cfun->computed_goto_common_reg, x);
      emit_jump (cfun->computed_goto_common_label);
    }
}
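
/* The construct handled above is the GNU C computed goto, e.g.

	void *p = &&lab;  ...  goto *p;

   Every `goto *p' in the function funnels through the single common
   indirect jump emitted on the first call.  */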
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (tree label)
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = ggc_alloc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (tree label)
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (tree label)
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx handler_slot, static_chain, save_area, insn;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->x_nonlocal_goto_handler_slots;
      for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
	   link = TREE_CHAIN (link))
	handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

      static_chain = copy_to_reg (lookup_static_chain (label));

      /* Get addr of containing function's current nonlocal goto handler,
	 which will do any cleanups and then jump to the label.  */
      handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
					       virtual_stack_vars_rtx,
					       static_chain));

      /* Get addr of containing function's nonlocal save area.  */
      save_area = p->x_nonlocal_goto_stack_level;
      if (save_area)
	save_area = replace_rtx (copy_rtx (save_area),
				 virtual_stack_vars_rtx, static_chain);

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
				      save_area, label_ref));
      else
#endif
	{
	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, static_chain);
	  emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);

	  /* USE of hard_frame_pointer_rtx added for consistency;
	     not clear if really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (handler_slot);
	}

      /* Search backwards to the jump insn and mark it as a
	 non-local goto.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	{
	  if (GET_CODE (insn) == JUMP_INSN)
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
						  const0_rtx, REG_NOTES (insn));
	      break;
	    }
	  else if (GET_CODE (insn) == CALL_INSN)
	    break;
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (tree body, rtx label, rtx last_insn)
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();

	  /* Don't do this adjust if it's to the end label and this function
	     is to return with a depressed stack pointer.  */
	  if (label == return_label
	      && (((TREE_CODE (TREE_TYPE (current_function_decl))
		    == FUNCTION_TYPE)
		   && (TYPE_RETURNS_STACK_DEPRESSED
		       (TREE_TYPE (current_function_decl))))))
	    ;
	  else
	    emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree tree_label, rtx rtl_label, rtx last_insn)
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup = ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes (including expand_end_loop)
	 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
	 as a placeholder.  */

      {
	rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();
	rtx start;
	rtx end;
	tree block;

	block = make_node (BLOCK);
	TREE_USED (block) = 1;

	if (!cfun->x_whole_function_mode_p)
	  (*lang_hooks.decls.insert_block) (block);
	else
	  {
	    BLOCK_CHAIN (block)
	      = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
	    BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
	      = block;
	  }

	start_sequence ();
	start = emit_note (NOTE_INSN_BLOCK_BEG);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (start) = block;
	fixup->before_jump = emit_note (NOTE_INSN_DELETED);
	end = emit_note (NOTE_INSN_BLOCK_END);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (end) = block;
	fixup->context = block;
	end_sequence ();
	emit_insn_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (rtx first_insn)
{
  fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN positive means report error if there is a jump into this
   contour from before the beginning of the contour.  This is also done if
   STACK_LEVEL is nonzero unless DONT_JUMP_IN is negative.  */

static void
fixup_gotos (struct nesting *thisblock, rtx stack_level,
	     tree cleanup_list, rtx first_insn, int dont_jump_in)
{
  struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  rtx cleanup_insns;

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.   This code used to use
	     the first non-label insn after f->target_rtl, but that's
	     wrong since such can be added, by things like put_var_into_stack
	     and have INSN_UIDs that are out of the range of the block.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in > 0 || (dont_jump_in == 0 && stack_level)
		  || cleanup_list)
	      && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error ("%Hlabel '%D' used before containing binding contour",
		     &DECL_SOURCE_LOCATION (f->target), f->target);
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  (*lang_hooks.decls.pushlevel) (0);
	  (*lang_hooks.decls.set_block) (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level
	      && ! (f->target_rtl == return_label
		    && ((TREE_CODE (TREE_TYPE (current_function_decl))
			 == FUNCTION_TYPE)
			&& (TYPE_RETURNS_STACK_DEPRESSED
			    (TREE_TYPE (current_function_decl))))))
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  (*lang_hooks.decls.poplevel) (1, 0, 0);

	  end_sequence ();
	  emit_insn_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      (*lang_hooks.decls.pushlevel) (0);
	      (*lang_hooks.decls.set_block) (f->context);
	      expand_cleanups (TREE_VALUE (lists), 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      (*lang_hooks.decls.poplevel) (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insn_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

void
expand_asm (tree string, int vol)
{
  rtx body;

  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));

  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);

  clear_last_expr ();
}
/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */
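
/* Example: given the constraint "+r", *ALLOWS_REG and *IS_INOUT come
   back true and *CONSTRAINT_P is rewritten to "=r"; given "=m", only
   *ALLOWS_MEM is set and the string is already canonical.  */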
bool
parse_output_constraint (const char **constraint_p, int operand_num,
			 int ninputs, int noutputs, bool *allows_mem,
			 bool *allows_reg, bool *is_inout)
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks `='");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
	warning ("output constraint `%c' for operand %d is not at the beginning",
		 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
	 it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string.  */
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
    switch (*p)
      {
      case '+':
      case '=':
	error ("operand constraint contains incorrectly positioned '+' or '='");
	return false;

      case '%':
	if (operand_num + 1 == ninputs + noutputs)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
	error ("matching constraint not valid in output operand");
	return false;

      case '<':  case '>':
	/* ??? Before flow, auto inc/dec insns are not supposed to exist,
	   excepting those that expand_call created.  So match memory
	   and hope.  */
	*allows_mem = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      case 'p': case 'r':
	*allows_reg = true;
	break;

      default:
	if (!ISALPHA (*p))
	  break;
	if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}
/* Similar, but for input constraints.  */

static bool
parse_input_constraint (const char **constraint_p, int input_num,
			int ninputs, int noutputs, int ninout,
			const char * const * constraints,
			bool *allows_mem, bool *allows_reg)
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
	if (constraint == orig_constraint)
	  {
	    error ("input operand constraint contains `%c'", constraint[j]);
	    return false;
	  }
	break;

      case '%':
	if (constraint == orig_constraint
	    && input_num + 1 == ninputs - ninout)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

	/* Whether or not a numeric constraint allows a register is
	   decided by the matching constraint, and so there is no need
	   to do anything special with them.  We must handle them in
	   the default case, so that we don't unnecessarily force
	   operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
	{
	  char *end;
	  unsigned long match;

	  match = strtoul (constraint + j, &end, 10);
	  if (match >= (unsigned long) noutputs)
	    {
	      error ("matching constraint references invalid operand number");
	      return false;
	    }

	  /* Try and find the real constraint for this dup.  Only do this
	     if the matching constraint is the only alternative.  */
	  if (*end == '\0'
	      && (j == 0 || (j == 1 && constraint[0] == '%')))
	    {
	      constraint = constraints[match];
	      *constraint_p = constraint;
	      c_len = strlen (constraint);
	      j = 0;
	      /* ??? At the end of the loop, we will skip the first part of
		 the matched constraint.  This assumes not only that the
		 other constraint is an output constraint, but also that
		 the '=' or '+' come first.  */
	      break;
	    }
	  else
	    j = end - constraint;
	  /* Anticipate increment at end of loop.  */
	  j--;
	}
	/* Fall through.  */

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      default:
	if (! ISALPHA (constraint[j]))
	  {
	    error ("invalid punctuation `%c' in constraint", constraint[j]);
	    return false;
	  }
	if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
	    != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}
/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in DECL.  Emit error and return TRUE for error,
   FALSE for ok.  */

static bool
decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
      && DECL_REGISTER (decl)
      && REG_P (DECL_RTL (decl))
      && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
    {
      rtx reg = DECL_RTL (decl);
      unsigned int regno;

      for (regno = REGNO (reg);
	   regno < (REGNO (reg)
		    + HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)));
	   regno++)
	if (TEST_HARD_REG_BIT (clobbered_regs, regno))
	  {
	    error ("asm-specifier for variable `%s' conflicts with asm clobber list",
		   IDENTIFIER_POINTER (DECL_NAME (decl)));

	    /* Reset registerness to stop multiple errors emitted for a
	       single variable.  */
	    DECL_REGISTER (decl) = 0;
	    return true;
	  }
    }

  return false;
}
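
/* E.g. (illustration): if `register int r asm ("r0");' is used as an
   asm operand while "r0" also appears in that asm's clobber list, the
   check above reports the conflict (assuming "r0" names a hard
   register on the target).  */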
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
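
/* For reference, a typical source-level use (illustration only):

	asm volatile ("add %1,%0" : "=r" (x) : "r" (y), "0" (x));

   Here OUTPUTS has one entry, "=r" (x), and INPUTS has two, the
   second being a matching-constraint input tied to operand 0.  */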
void
expand_asm_operands (tree string, tree outputs, tree inputs,
		     tree clobbers, int vol, const char *filename, int line)
{
  rtvec argvec, constraintvec;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout;
  int nclobbers;
  HARD_REG_SET clobbered_regs;
  int clobber_conflict_found = 0;
  tree tail;
  tree t;
  int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = alloca (noutputs * sizeof (rtx));
  int *inout_opnum = alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = alloca (noutputs * sizeof (enum machine_mode));
  const char **constraints
    = alloca ((noutputs + ninputs) * sizeof (const char *));
  int old_generating_concat_p = generating_concat_p;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (! check_operand_nalternatives (outputs, inputs))
    return;

  if (! check_unique_operand_names (outputs, inputs))
    return;

  string = resolve_asm_operand_names (string, outputs, inputs);

  /* Collect constraints.  */
  i = 0;
  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
  for (t = inputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));

#ifdef MD_ASM_CLOBBERS
  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  MD_ASM_CLOBBERS (clobbers);
#endif

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  CLEAR_HARD_REG_SET (clobbered_regs);
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));

      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);

      /* Mark clobbered registers.  */
      if (i >= 0)
	{
	  /* Clobbering the PIC register is an error */
	  if (i == (int) PIC_OFFSET_TABLE_REGNUM)
	    {
	      error ("PIC register `%s' clobbered in `asm'", regname);
	      return;
	    }

	  SET_HARD_REG_BIT (clobbered_regs, i);
	}
    }

  clear_last_expr ();

  /* First pass over inputs and outputs checks validity and sets
     mark_addressable if needed.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      const char *constraint;
      bool is_inout;
      bool allows_reg;
      bool allows_mem;

      /* If there's an erroneous arg, emit no insn.  */
      if (type == error_mark_node)
	return;

      /* Try to parse the output constraint.  If that fails, there's
	 no point in going further.  */
      constraint = constraints[i];
      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
				    &allows_mem, &allows_reg, &is_inout))
	return;

      if (! allows_reg
	  && (allows_mem
	      || is_inout
	      || (DECL_P (val)
		  && GET_CODE (DECL_RTL (val)) == REG
		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
	(*lang_hooks.mark_addressable) (val);

      if (is_inout)
	ninout++;
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }
  for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
    {
      bool allows_reg, allows_mem;
      const char *constraint;

      /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
	 would get VOIDmode and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
				    constraints, &allows_mem, &allows_reg))
	return;

      if (! allows_reg && allows_mem)
	(*lang_hooks.mark_addressable) (TREE_VALUE (tail));
    }

  /* Second pass evaluates arguments.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      bool is_inout;
      bool allows_reg;
      bool allows_mem;
      rtx op;

      if (!parse_output_constraint (&constraints[i], i, ninputs,
				    noutputs, &allows_mem, &allows_reg,
				    &is_inout))
	abort ();

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
	   && allows_mem)
	  || (DECL_P (val)
	      && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || is_inout)
	{
	  op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
	  if (GET_CODE (op) == MEM)
	    op = validize_mem (op);

	  if (! allows_reg && GET_CODE (op) != MEM)
	    error ("output number %d not directly addressable", i);
	  if ((! allows_mem && GET_CODE (op) == MEM)
	      || GET_CODE (op) == CONCAT)
	    {
	      real_output_rtx[i] = protect_from_queue (op, 1);
	      op = gen_reg_rtx (GET_MODE (op));
	      if (is_inout)
		emit_move_insn (op, real_output_rtx[i]);
	    }
	}
      else
	{
	  op = assign_temp (type, 0, 0, 1);
	  op = validize_mem (op);
	  TREE_VALUE (tail) = make_tree (type, op);
	}
      output_rtx[i] = op;

      generating_concat_p = old_generating_concat_p;

      if (is_inout)
	{
	  inout_mode[ninout] = TYPE_MODE (type);
	  inout_opnum[ninout++] = i;
	}

      if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
	clobber_conflict_found = 1;
    }
1652 /* Make vectors for the expression-rtx, constraint strings,
1653 and named operands. */
1655 argvec = rtvec_alloc (ninputs);
1656 constraintvec = rtvec_alloc (ninputs);
1658 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1659 : GET_MODE (output_rtx[0])),
1660 TREE_STRING_POINTER (string),
1661 empty_string, 0, argvec, constraintvec,
1662 filename, line);
1664 MEM_VOLATILE_P (body) = vol;
1666 /* Eval the inputs and put them into ARGVEC.
1667 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1669 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1671 bool allows_reg, allows_mem;
1672 const char *constraint;
1673 tree val, type;
1674 rtx op;
1676 constraint = constraints[i + noutputs];
1677 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1678 constraints, &allows_mem, &allows_reg))
1679 abort ();
1681 generating_concat_p = 0;
1683 val = TREE_VALUE (tail);
1684 type = TREE_TYPE (val);
1685 op = expand_expr (val, NULL_RTX, VOIDmode,
1686 (allows_mem && !allows_reg
1687 ? EXPAND_MEMORY : EXPAND_NORMAL));
1689 /* Never pass a CONCAT to an ASM. */
1690 if (GET_CODE (op) == CONCAT)
1691 op = force_reg (GET_MODE (op), op);
1692 else if (GET_CODE (op) == MEM)
1693 op = validize_mem (op);
1695 if (asm_operand_ok (op, constraint) <= 0)
1697 if (allows_reg)
1698 op = force_reg (TYPE_MODE (type), op);
1699 else if (!allows_mem)
1700 warning ("asm operand %d probably doesn't match constraints",
1701 i + noutputs);
1702 else if (GET_CODE (op) == MEM)
1704 /* We won't recognize either volatile memory or memory
1705 with a queued address as available a memory_operand
1706 at this point. Ignore it: clearly this *is* a memory. */
1708 else
1710 warning ("use of memory input without lvalue in "
1711 "asm operand %d is deprecated", i + noutputs);
1713 if (CONSTANT_P (op))
1715 op = force_const_mem (TYPE_MODE (type), op);
1716 op = validize_mem (op);
1718 else if (GET_CODE (op) == REG
1719 || GET_CODE (op) == SUBREG
1720 || GET_CODE (op) == ADDRESSOF
1721 || GET_CODE (op) == CONCAT)
1723 tree qual_type = build_qualified_type (type,
1724 (TYPE_QUALS (type)
1725 | TYPE_QUAL_CONST));
1726 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1727 memloc = validize_mem (memloc);
1728 emit_move_insn (memloc, op);
1729 op = memloc;
1734 generating_concat_p = old_generating_concat_p;
1735 ASM_OPERANDS_INPUT (body, i) = op;
1737 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1738 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1740 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1741 clobber_conflict_found = 1;
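/* A sketch of the two interesting input cases (hypothetical source):
   an lvalue fed to a memory-only constraint, as in

     double d;
     asm ("fldl %0" : : "m" (d));

   is expanded with EXPAND_MEMORY, while a constant fed to "m" takes
   the force_const_mem path above (and draws the deprecation warning,
   since it is not an lvalue).  */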
1744 /* Protect all the operands from the queue now that they have all been
1745 evaluated. */
1747 generating_concat_p = 0;
1749 for (i = 0; i < ninputs - ninout; i++)
1750 ASM_OPERANDS_INPUT (body, i)
1751 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1753 for (i = 0; i < noutputs; i++)
1754 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1756 /* For in-out operands, copy output rtx to input rtx. */
1757 for (i = 0; i < ninout; i++)
1759 int j = inout_opnum[i];
1760 char buffer[16];
1762 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1763 = output_rtx[j];
1765 sprintf (buffer, "%d", j);
1766 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1767 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
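/* The net effect, sketched on a hypothetical operand list: an in-out
   operand such as "+r" (x) in

     asm ("addl %1, %0" : "+r" (x) : "r" (y));

   behaves as if it had been written

     asm ("addl %1, %0" : "=r" (x) : "r" (y), "0" (x));

   i.e. the hidden input's constraint is just the digit string naming
   the matching output.  */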
1770 generating_concat_p = old_generating_concat_p;
1772 /* Now, for each output, construct an rtx
1773 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1774 ARGVEC CONSTRAINTS OPNAMES))
1775 If there is more than one, put them inside a PARALLEL. */
1777 if (noutputs == 1 && nclobbers == 0)
1779 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1780 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1783 else if (noutputs == 0 && nclobbers == 0)
1785 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1786 emit_insn (body);
1789 else
1791 rtx obody = body;
1792 int num = noutputs;
1794 if (num == 0)
1795 num = 1;
1797 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1799 /* For each output operand, store a SET. */
1800 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1802 XVECEXP (body, 0, i)
1803 = gen_rtx_SET (VOIDmode,
1804 output_rtx[i],
1805 gen_rtx_ASM_OPERANDS
1806 (GET_MODE (output_rtx[i]),
1807 TREE_STRING_POINTER (string),
1808 constraints[i], i, argvec, constraintvec,
1809 filename, line));
1811 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1814 /* If there are no outputs (but there are some clobbers)
1815 store the bare ASM_OPERANDS into the PARALLEL. */
1817 if (i == 0)
1818 XVECEXP (body, 0, i++) = obody;
1820 /* Store (clobber REG) for each clobbered register specified. */
1822 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1824 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1825 int j = decode_reg_name (regname);
1826 rtx clobbered_reg;
1828 if (j < 0)
1830 if (j == -3) /* `cc', which is not a register */
1831 continue;
1833 if (j == -4) /* `memory', don't cache memory across asm */
1835 XVECEXP (body, 0, i++)
1836 = gen_rtx_CLOBBER (VOIDmode,
1837 gen_rtx_MEM
1838 (BLKmode,
1839 gen_rtx_SCRATCH (VOIDmode)));
1840 continue;
1843 /* Ignore unknown register, error already signaled. */
1844 continue;
1847 /* Use QImode since that's guaranteed to clobber just one reg. */
1848 clobbered_reg = gen_rtx_REG (QImode, j);
1850 /* Do sanity check for overlap between clobbers and respectively
1851 input and outputs that hasn't been handled. Such overlap
1852 should have been detected and reported above. */
1853 if (!clobber_conflict_found)
1855 int opno;
1857 /* We test the old body (obody) contents to avoid tripping
1858 over the under-construction body. */
1859 for (opno = 0; opno < noutputs; opno++)
1860 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1861 internal_error ("asm clobber conflict with output operand");
1863 for (opno = 0; opno < ninputs - ninout; opno++)
1864 if (reg_overlap_mentioned_p (clobbered_reg,
1865 ASM_OPERANDS_INPUT (obody, opno)))
1866 internal_error ("asm clobber conflict with input operand");
1869 XVECEXP (body, 0, i++)
1870 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1873 emit_insn (body);
1876 /* For any outputs that needed reloading into registers, spill them
1877 back to where they belong. */
1878 for (i = 0; i < noutputs; ++i)
1879 if (real_output_rtx[i])
1880 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1882 free_temp_slots ();
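/* Worked example of the clobber handling (illustrative): the classic
   compiler barrier

     asm volatile ("" : : : "memory", "cc");

   has no outputs, so the bare ASM_OPERANDS is stored into the
   PARALLEL; "cc" decodes to -3 and is skipped on machines where it
   names no register, and "memory" decodes to -4 and becomes the
   (clobber (mem:BLK (scratch))) that keeps the optimizers from
   caching memory values across the asm.  */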
1885 /* A subroutine of expand_asm_operands. Check that all operands have
1886 the same number of alternatives. Return true if so. */
1888 static bool
1889 check_operand_nalternatives (tree outputs, tree inputs)
1891 if (outputs || inputs)
1893 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1894 int nalternatives
1895 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1896 tree next = inputs;
1898 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1900 error ("too many alternatives in `asm'");
1901 return false;
1904 tmp = outputs;
1905 while (tmp)
1907 const char *constraint
1908 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1910 if (n_occurrences (',', constraint) != nalternatives)
1912 error ("operand constraints for `asm' differ in number of alternatives");
1913 return false;
1916 if (TREE_CHAIN (tmp))
1917 tmp = TREE_CHAIN (tmp);
1918 else
1919 tmp = next, next = 0;
1923 return true;
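/* For example (hypothetical constraints): pairing an output "=r,m"
   with an input "rm,r" passes, since each string contains one comma
   and hence two alternatives; pairing "=r,m" with a plain "r" is
   rejected here.  */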
1926 /* A subroutine of expand_asm_operands. Check that all operand names
1927 are unique. Return true if so. We rely on the fact that these names
1928 are identifiers, and so have been canonicalized by get_identifier,
1929 so all we need are pointer comparisons. */
1931 static bool
1932 check_unique_operand_names (tree outputs, tree inputs)
1934 tree i, j;
1936 for (i = outputs; i ; i = TREE_CHAIN (i))
1938 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1939 if (! i_name)
1940 continue;
1942 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1943 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1944 goto failure;
1947 for (i = inputs; i ; i = TREE_CHAIN (i))
1949 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1950 if (! i_name)
1951 continue;
1953 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1954 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1955 goto failure;
1956 for (j = outputs; j ; j = TREE_CHAIN (j))
1957 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1958 goto failure;
1961 return true;
1963 failure:
1964 error ("duplicate asm operand name '%s'",
1965 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1966 return false;
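/* E.g. (illustrative source):

     asm ("..." : [res] "=r" (x), [res] "=r" (y));

   fails here because both outputs are named "res".  */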
1969 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1970 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1971 STRING and in the constraints to those numbers. */
1973 tree
1974 resolve_asm_operand_names (tree string, tree outputs, tree inputs)
1976 char *buffer;
1977 char *p;
1978 const char *c;
1979 tree t;
1981 /* Substitute [<name>] in input constraint strings. There should be no
1982 named operands in output constraints. */
1983 for (t = inputs; t ; t = TREE_CHAIN (t))
1985 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1986 if (strchr (c, '[') != NULL)
1988 p = buffer = xstrdup (c);
1989 while ((p = strchr (p, '[')) != NULL)
1990 p = resolve_operand_name_1 (p, outputs, inputs);
1991 TREE_VALUE (TREE_PURPOSE (t))
1992 = build_string (strlen (buffer), buffer);
1993 free (buffer);
1997 /* Now check for any needed substitutions in the template. */
1998 c = TREE_STRING_POINTER (string);
1999 while ((c = strchr (c, '%')) != NULL)
2001 if (c[1] == '[')
2002 break;
2003 else if (ISALPHA (c[1]) && c[2] == '[')
2004 break;
2005 else
2007 c += 1;
2008 continue;
2012 if (c)
2014 /* OK, we need to make a copy so we can perform the substitutions.
2015 Assume that we will not need extra space--we get to remove '['
2016 and ']', which means we cannot have a problem until we have more
2017 than 999 operands. */
2018 buffer = xstrdup (TREE_STRING_POINTER (string));
2019 p = buffer + (c - TREE_STRING_POINTER (string));
2021 while ((p = strchr (p, '%')) != NULL)
2023 if (p[1] == '[')
2024 p += 1;
2025 else if (ISALPHA (p[1]) && p[2] == '[')
2026 p += 2;
2027 else
2029 p += 1;
2030 continue;
2033 p = resolve_operand_name_1 (p, outputs, inputs);
2036 string = build_string (strlen (buffer), buffer);
2037 free (buffer);
2040 return string;
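/* A sketch of the whole substitution (hypothetical source):

     asm ("add %[out], %[in]" : [out] "+r" (x) : [in] "r" (y));

   leaves this function with the template rewritten to "add %0, %1",
   and any [name] references inside input constraint strings replaced
   by operand numbers the same way.  */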
2043 /* A subroutine of resolve_operand_names. P points to the '[' for a
2044 potential named operand of the form [<name>]. In place, replace
2045 the name and brackets with a number. Return a pointer to the
2046 balance of the string after substitution. */
2048 static char *
2049 resolve_operand_name_1 (char *p, tree outputs, tree inputs)
2051 char *q;
2052 int op;
2053 tree t;
2054 size_t len;
2056 /* Collect the operand name. */
2057 q = strchr (p, ']');
2058 if (!q)
2060 error ("missing close brace for named operand");
2061 return strchr (p, '\0');
2063 len = q - p - 1;
2065 /* Resolve the name to a number. */
2066 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2068 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2069 if (name)
2071 const char *c = TREE_STRING_POINTER (name);
2072 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2073 goto found;
2076 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2078 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2079 if (name)
2081 const char *c = TREE_STRING_POINTER (name);
2082 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2083 goto found;
2087 *q = '\0';
2088 error ("undefined named operand '%s'", p + 1);
2089 op = 0;
2090 found:
2092 /* Replace the name with the number. Unfortunately, not all libraries
2093 get the return value of sprintf correct, so search for the end of the
2094 generated string by hand. */
2095 sprintf (p, "%d", op);
2096 p = strchr (p, '\0');
2098 /* Verify the no extra buffer space assumption. */
2099 if (p > q)
2100 abort ();
2102 /* Shift the rest of the buffer down to fill the gap. */
2103 memmove (p, q + 1, strlen (q + 1) + 1);
2105 return p;
2108 /* Generate RTL to evaluate the expression EXP
2109 and remember it in case this is the VALUE in a ({... VALUE; }) constr.
2110 Provided just for backward-compatibility. expand_expr_stmt_value()
2111 should be used for new code. */
2113 void
2114 expand_expr_stmt (tree exp)
2116 expand_expr_stmt_value (exp, -1, 1);
2119 /* Generate RTL to evaluate the expression EXP. WANT_VALUE tells
2120 whether to (1) save the value of the expression, (0) discard it or
2121 (-1) use expr_stmts_for_value to tell. The use of -1 is
2122 deprecated, and retained only for backward compatibility. */
2124 void
2125 expand_expr_stmt_value (tree exp, int want_value, int maybe_last)
2127 rtx value;
2128 tree type;
2130 if (want_value == -1)
2131 want_value = expr_stmts_for_value != 0;
2133 /* If -Wextra, warn about statements with no side effects,
2134 except for an explicit cast to void (e.g. for assert()), and
2135 except for last statement in ({...}) where they may be useful. */
2136 if (! want_value
2137 && (expr_stmts_for_value == 0 || ! maybe_last)
2138 && exp != error_mark_node
2139 && warn_unused_value)
2141 if (TREE_SIDE_EFFECTS (exp))
2142 warn_if_unused_value (exp);
2143 else if (!VOID_TYPE_P (TREE_TYPE (exp)))
2144 warning ("%Hstatement with no effect", &emit_locus);
2147 /* If EXP is of function type and we are expanding statements for
2148 value, convert it to pointer-to-function. */
2149 if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2150 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2152 /* The call to `expand_expr' could cause last_expr_type and
2153 last_expr_value to get reset. Therefore, we set last_expr_value
2154 and last_expr_type *after* calling expand_expr. */
2155 value = expand_expr (exp, want_value ? NULL_RTX : const0_rtx,
2156 VOIDmode, 0);
2157 type = TREE_TYPE (exp);
2159 /* If all we do is reference a volatile value in memory,
2160 copy it to a register to be sure it is actually touched. */
2161 if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
2163 if (TYPE_MODE (type) == VOIDmode)
2165 else if (TYPE_MODE (type) != BLKmode)
2166 value = copy_to_reg (value);
2167 else
2169 rtx lab = gen_label_rtx ();
2171 /* Compare the value with itself to reference it. */
2172 emit_cmp_and_jump_insns (value, value, EQ,
2173 expand_expr (TYPE_SIZE (type),
2174 NULL_RTX, VOIDmode, 0),
2175 BLKmode, 0, lab);
2176 emit_label (lab);
2180 /* If this expression is part of a ({...}) and is in memory, we may have
2181 to preserve temporaries. */
2182 preserve_temp_slots (value);
2184 /* Free any temporaries used to evaluate this expression. Any temporary
2185 used as a result of this expression will already have been preserved
2186 above. */
2187 free_temp_slots ();
2189 if (want_value)
2191 last_expr_value = value;
2192 last_expr_type = type;
2195 emit_queue ();
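/* Illustrative GNU C: in

     int i = ({ int t = f (); t + 1; });

   each statement of the braced group passes through here; for the
   final `t + 1', WANT_VALUE ends up nonzero, so its rtx and type are
   saved in last_expr_value/last_expr_type for expand_end_stmt_expr
   to collect.  */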
2198 /* Warn if EXP contains any computations whose results are not used.
2199 Return 1 if a warning is printed; 0 otherwise. */
2201 int
2202 warn_if_unused_value (tree exp)
2204 if (TREE_USED (exp))
2205 return 0;
2207 /* Don't warn about void constructs. This includes casting to void,
2208 void function calls, and statement expressions with a final cast
2209 to void. */
2210 if (VOID_TYPE_P (TREE_TYPE (exp)))
2211 return 0;
2213 switch (TREE_CODE (exp))
2215 case PREINCREMENT_EXPR:
2216 case POSTINCREMENT_EXPR:
2217 case PREDECREMENT_EXPR:
2218 case POSTDECREMENT_EXPR:
2219 case MODIFY_EXPR:
2220 case INIT_EXPR:
2221 case TARGET_EXPR:
2222 case CALL_EXPR:
2223 case RTL_EXPR:
2224 case TRY_CATCH_EXPR:
2225 case WITH_CLEANUP_EXPR:
2226 case EXIT_EXPR:
2227 return 0;
2229 case BIND_EXPR:
2230 /* For a binding, warn if no side effect within it. */
2231 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2233 case SAVE_EXPR:
2234 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2236 case TRUTH_ORIF_EXPR:
2237 case TRUTH_ANDIF_EXPR:
2238 /* In && or ||, warn if 2nd operand has no side effect. */
2239 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2241 case COMPOUND_EXPR:
2242 if (TREE_NO_UNUSED_WARNING (exp))
2243 return 0;
2244 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2245 return 1;
2246 /* Let people do `(foo (), 0)' without a warning. */
2247 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2248 return 0;
2249 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2251 case NOP_EXPR:
2252 case CONVERT_EXPR:
2253 case NON_LVALUE_EXPR:
2254 /* Don't warn about conversions not explicit in the user's program. */
2255 if (TREE_NO_UNUSED_WARNING (exp))
2256 return 0;
2257 /* Assignment to a cast usually results in a cast of a modify.
2258 Don't complain about that. There can be an arbitrary number of
2259 casts before the modify, so we must loop until we find the first
2260 non-cast expression and then test to see if that is a modify. */
2262 tree tem = TREE_OPERAND (exp, 0);
2264 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2265 tem = TREE_OPERAND (tem, 0);
2267 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2268 || TREE_CODE (tem) == CALL_EXPR)
2269 return 0;
2271 goto maybe_warn;
2273 case INDIRECT_REF:
2274 /* Don't warn about automatic dereferencing of references, since
2275 the user cannot control it. */
2276 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2277 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2278 /* Fall through. */
2280 default:
2281 /* Referencing a volatile value is a side effect, so don't warn. */
2282 if ((DECL_P (exp)
2283 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2284 && TREE_THIS_VOLATILE (exp))
2285 return 0;
2287 /* If this is an expression which has no operands, there is no value
2288 to be unused. There are no such language-independent codes,
2289 but front ends may define such. */
2290 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2291 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2292 return 0;
2294 maybe_warn:
2295 /* If this is an expression with side effects, don't warn. */
2296 if (TREE_SIDE_EFFECTS (exp))
2297 return 0;
2299 warning ("%Hvalue computed is not used", &emit_locus);
2300 return 1;
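/* A few illustrative statements and this function's verdict:

     x + 1;           warned: value computed is not used
     (void) (x + 1);  quiet: explicit cast to void
     f (), 0;         quiet: constant 2nd operand of COMPOUND_EXPR
     v;               quiet if V is volatile, since the reference
                      itself is a side effect  */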
2304 /* Clear out the memory of the last expression evaluated. */
2306 void
2307 clear_last_expr (void)
2309 last_expr_type = NULL_TREE;
2310 last_expr_value = NULL_RTX;
2313 /* Begin a statement-expression, i.e., a series of statements which
2314 may return a value. Return the RTL_EXPR for this statement expr.
2315 The caller must save that value and pass it to
2316 expand_end_stmt_expr. If HAS_SCOPE is nonzero, temporaries created
2317 in the statement-expression are deallocated at the end of the
2318 expression. */
2320 tree
2321 expand_start_stmt_expr (int has_scope)
2323 tree t;
2325 /* Make the RTL_EXPR node temporary, not momentary,
2326 so that rtl_expr_chain doesn't become garbage. */
2327 t = make_node (RTL_EXPR);
2328 do_pending_stack_adjust ();
2329 if (has_scope)
2330 start_sequence_for_rtl_expr (t);
2331 else
2332 start_sequence ();
2333 NO_DEFER_POP;
2334 expr_stmts_for_value++;
2335 return t;
2338 /* Restore the previous state at the end of a statement that returns a value.
2339 Returns a tree node representing the statement's value and the
2340 insns to compute the value.
2342 The nodes of that expression have been freed by now, so we cannot use them.
2343 But we don't want to do that anyway; the expression has already been
2344 evaluated and now we just want to use the value. So generate a RTL_EXPR
2345 with the proper type and RTL value.
2347 If the last substatement was not an expression,
2348 return something with type `void'. */
2350 tree
2351 expand_end_stmt_expr (tree t)
2353 OK_DEFER_POP;
2355 if (! last_expr_value || ! last_expr_type)
2357 last_expr_value = const0_rtx;
2358 last_expr_type = void_type_node;
2360 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2361 /* Remove any possible QUEUED. */
2362 last_expr_value = protect_from_queue (last_expr_value, 0);
2364 emit_queue ();
2366 TREE_TYPE (t) = last_expr_type;
2367 RTL_EXPR_RTL (t) = last_expr_value;
2368 RTL_EXPR_SEQUENCE (t) = get_insns ();
2370 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2372 end_sequence ();
2374 /* Don't consider deleting this expr or containing exprs at tree level. */
2375 TREE_SIDE_EFFECTS (t) = 1;
2376 /* Propagate volatility of the actual RTL expr. */
2377 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2379 clear_last_expr ();
2380 expr_stmts_for_value--;
2382 return t;
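/* A front end uses these two functions as a bracketing pair; a
   hypothetical caller looks roughly like

     tree t = expand_start_stmt_expr (1);
     ... expand the statements of the ({...}) group ...
     t = expand_end_stmt_expr (t);

   after which T is an RTL_EXPR carrying both the group's insns and
   the rtx of its last expression statement.  */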
2385 /* Generate RTL for the start of an if-then. COND is the expression
2386 whose truth should be tested.
2388 If EXITFLAG is nonzero, this conditional is visible to
2389 `exit_something'. */
2391 void
2392 expand_start_cond (tree cond, int exitflag)
2394 struct nesting *thiscond = ALLOC_NESTING ();
2396 /* Make an entry on cond_stack for the cond we are entering. */
2398 thiscond->desc = COND_NESTING;
2399 thiscond->next = cond_stack;
2400 thiscond->all = nesting_stack;
2401 thiscond->depth = ++nesting_depth;
2402 thiscond->data.cond.next_label = gen_label_rtx ();
2403 /* Before we encounter an `else', we don't need a separate exit label
2404 unless there are supposed to be exit statements
2405 to exit this conditional. */
2406 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2407 thiscond->data.cond.endif_label = thiscond->exit_label;
2408 cond_stack = thiscond;
2409 nesting_stack = thiscond;
2411 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2414 /* Generate RTL between then-clause and the elseif-clause
2415 of an if-then-elseif-.... */
2417 void
2418 expand_start_elseif (tree cond)
2420 if (cond_stack->data.cond.endif_label == 0)
2421 cond_stack->data.cond.endif_label = gen_label_rtx ();
2422 emit_jump (cond_stack->data.cond.endif_label);
2423 emit_label (cond_stack->data.cond.next_label);
2424 cond_stack->data.cond.next_label = gen_label_rtx ();
2425 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2428 /* Generate RTL between the then-clause and the else-clause
2429 of an if-then-else. */
2431 void
2432 expand_start_else (void)
2434 if (cond_stack->data.cond.endif_label == 0)
2435 cond_stack->data.cond.endif_label = gen_label_rtx ();
2437 emit_jump (cond_stack->data.cond.endif_label);
2438 emit_label (cond_stack->data.cond.next_label);
2439 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2442 /* After calling expand_start_else, turn this "else" into an "else if"
2443 by providing another condition. */
2445 void
2446 expand_elseif (tree cond)
2448 cond_stack->data.cond.next_label = gen_label_rtx ();
2449 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2452 /* Generate RTL for the end of an if-then.
2453 Pop the record for it off of cond_stack. */
2455 void
2456 expand_end_cond (void)
2458 struct nesting *thiscond = cond_stack;
2460 do_pending_stack_adjust ();
2461 if (thiscond->data.cond.next_label)
2462 emit_label (thiscond->data.cond.next_label);
2463 if (thiscond->data.cond.endif_label)
2464 emit_label (thiscond->data.cond.endif_label);
2466 POPSTACK (cond_stack);
2467 clear_last_expr ();
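/* The full calling sequence for

     if (a) s1; else if (b) s2; else s3;

   as a front end would emit it (illustrative):

     expand_start_cond (a, 0);
     ... expand s1 ...
     expand_start_elseif (b);
     ... expand s2 ...
     expand_start_else ();
     ... expand s3 ...
     expand_end_cond ();  */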
2470 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2471 loop should be exited by `exit_something'. This is a loop for which
2472 `expand_continue' will jump to the top of the loop.
2474 Make an entry on loop_stack to record the labels associated with
2475 this loop. */
2477 struct nesting *
2478 expand_start_loop (int exit_flag)
2480 struct nesting *thisloop = ALLOC_NESTING ();
2482 /* Make an entry on loop_stack for the loop we are entering. */
2484 thisloop->desc = LOOP_NESTING;
2485 thisloop->next = loop_stack;
2486 thisloop->all = nesting_stack;
2487 thisloop->depth = ++nesting_depth;
2488 thisloop->data.loop.start_label = gen_label_rtx ();
2489 thisloop->data.loop.end_label = gen_label_rtx ();
2490 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2491 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2492 loop_stack = thisloop;
2493 nesting_stack = thisloop;
2495 do_pending_stack_adjust ();
2496 emit_queue ();
2497 emit_note (NOTE_INSN_LOOP_BEG);
2498 emit_label (thisloop->data.loop.start_label);
2500 return thisloop;
2503 /* Like expand_start_loop but for a loop where the continuation point
2504 (for expand_continue_loop) will be specified explicitly. */
2506 struct nesting *
2507 expand_start_loop_continue_elsewhere (int exit_flag)
2509 struct nesting *thisloop = expand_start_loop (exit_flag);
2510 loop_stack->data.loop.continue_label = gen_label_rtx ();
2511 return thisloop;
2514 /* Begin a null, aka do { } while (0) "loop". But since the contents
2515 of said loop can still contain a break, we must frob the loop nest. */
2517 struct nesting *
2518 expand_start_null_loop (void)
2520 struct nesting *thisloop = ALLOC_NESTING ();
2522 /* Make an entry on loop_stack for the loop we are entering. */
2524 thisloop->desc = LOOP_NESTING;
2525 thisloop->next = loop_stack;
2526 thisloop->all = nesting_stack;
2527 thisloop->depth = ++nesting_depth;
2528 thisloop->data.loop.start_label = emit_note (NOTE_INSN_DELETED);
2529 thisloop->data.loop.end_label = gen_label_rtx ();
2530 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2531 thisloop->exit_label = thisloop->data.loop.end_label;
2532 loop_stack = thisloop;
2533 nesting_stack = thisloop;
2535 return thisloop;
2538 /* Specify the continuation point for a loop started with
2539 expand_start_loop_continue_elsewhere.
2540 Use this at the point in the code to which a continue statement
2541 should jump. */
2543 void
2544 expand_loop_continue_here (void)
2546 do_pending_stack_adjust ();
2547 emit_note (NOTE_INSN_LOOP_CONT);
2548 emit_label (loop_stack->data.loop.continue_label);
2551 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2552 Pop the block off of loop_stack. */
2554 void
2555 expand_end_loop (void)
2557 rtx start_label = loop_stack->data.loop.start_label;
2558 rtx etc_note;
2559 int eh_regions, debug_blocks;
2560 bool empty_test;
2562 /* Mark the continue-point at the top of the loop if none elsewhere. */
2563 if (start_label == loop_stack->data.loop.continue_label)
2564 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2566 do_pending_stack_adjust ();
2568 /* If the loop starts with a loop exit, roll that to the end where
2569 it will optimize together with the jump back.
2571 If the loop presently looks like this (in pseudo-C):
2573 LOOP_BEG
2574 start_label:
2575 if (test) goto end_label;
2576 LOOP_END_TOP_COND
2577 body;
2578 goto start_label;
2579 end_label:
2581 transform it to look like:
2583 LOOP_BEG
2584 goto start_label;
2585 top_label:
2586 body;
2587 start_label:
2588 if (test) goto end_label;
2589 goto top_label;
2590 end_label:
2592 We rely on the presence of NOTE_INSN_LOOP_END_TOP_COND to mark
2593 the end of the entry conditional. Without this, our lexical scan
2594 can't tell the difference between an entry conditional and a
2595 body conditional that exits the loop. Mistaking the two means
2596 that we can misplace the NOTE_INSN_LOOP_CONT note, which can
2597 screw up loop unrolling.
2599 Things will be oh so much better when loop optimization is done
2600 off of a proper control flow graph... */
2602 /* Scan insns from the top of the loop looking for the END_TOP_COND note. */
2604 empty_test = true;
2605 eh_regions = debug_blocks = 0;
2606 for (etc_note = start_label; etc_note ; etc_note = NEXT_INSN (etc_note))
2607 if (GET_CODE (etc_note) == NOTE)
2609 if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_END_TOP_COND)
2610 break;
2612 /* We must not walk into a nested loop. */
2613 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_BEG)
2615 etc_note = NULL_RTX;
2616 break;
2619 /* At the same time, scan for EH region notes, as we don't want
2620 to scrog region nesting. This shouldn't happen, but... */
2621 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_BEG)
2622 eh_regions++;
2623 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_END)
2625 if (--eh_regions < 0)
2626 /* We've come to the end of an EH region, but never saw the
2627 beginning of that region. That means that an EH region
2628 begins before the top of the loop, and ends in the middle
2629 of it. The existence of such a situation violates a basic
2630 assumption in this code, since that would imply that even
2631 when EH_REGIONS is zero, we might move code out of an
2632 exception region. */
2633 abort ();
2636 /* Likewise for debug scopes. In this case we'll either (1) move
2637 all of the notes if they are properly nested or (2) leave the
2638 notes alone and only rotate the loop at high optimization
2639 levels when we expect to scrog debug info. */
2640 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_BEG)
2641 debug_blocks++;
2642 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_END)
2643 debug_blocks--;
2645 else if (INSN_P (etc_note))
2646 empty_test = false;
2648 if (etc_note
2649 && optimize
2650 && ! empty_test
2651 && eh_regions == 0
2652 && (debug_blocks == 0 || optimize >= 2)
2653 && NEXT_INSN (etc_note) != NULL_RTX
2654 && ! any_condjump_p (get_last_insn ()))
2656 /* We found one. Move everything from START to ETC to the end
2657 of the loop, and add a jump from the top of the loop. */
2658 rtx top_label = gen_label_rtx ();
2659 rtx start_move = start_label;
2661 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2662 then we want to move this note also. */
2663 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2664 && NOTE_LINE_NUMBER (PREV_INSN (start_move)) == NOTE_INSN_LOOP_CONT)
2665 start_move = PREV_INSN (start_move);
2667 emit_label_before (top_label, start_move);
2669 /* Actually move the insns. If the debug scopes are nested, we
2670 can move everything at once. Otherwise we have to move them
2671 one by one and squeeze out the block notes. */
2672 if (debug_blocks == 0)
2673 reorder_insns (start_move, etc_note, get_last_insn ());
2674 else
2676 rtx insn, next_insn;
2677 for (insn = start_move; insn; insn = next_insn)
2679 /* Figure out which insn comes after this one. We have
2680 to do this before we move INSN. */
2681 next_insn = (insn == etc_note ? NULL : NEXT_INSN (insn));
2683 if (GET_CODE (insn) == NOTE
2684 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2685 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2686 continue;
2688 reorder_insns (insn, insn, get_last_insn ());
2692 /* Add the jump from the top of the loop. */
2693 emit_jump_insn_before (gen_jump (start_label), top_label);
2694 emit_barrier_before (top_label);
2695 start_label = top_label;
2698 emit_jump (start_label);
2699 emit_note (NOTE_INSN_LOOP_END);
2700 emit_label (loop_stack->data.loop.end_label);
2702 POPSTACK (loop_stack);
2704 clear_last_expr ();
2707 /* Finish a null loop, aka do { } while (0). */
2709 void
2710 expand_end_null_loop (void)
2712 do_pending_stack_adjust ();
2713 emit_label (loop_stack->data.loop.end_label);
2715 POPSTACK (loop_stack);
2717 clear_last_expr ();
2720 /* Generate a jump to the current loop's continue-point.
2721 This is usually the top of the loop, but may be specified
2722 explicitly elsewhere. If not currently inside a loop,
2723 return 0 and do nothing; caller will print an error message. */
2725 int
2726 expand_continue_loop (struct nesting *whichloop)
2728 /* Emit information for branch prediction. */
2729 rtx note;
2731 if (flag_guess_branch_prob)
2733 note = emit_note (NOTE_INSN_PREDICTION);
2734 NOTE_PREDICTION (note) = NOTE_PREDICT (PRED_CONTINUE, IS_TAKEN);
2736 clear_last_expr ();
2737 if (whichloop == 0)
2738 whichloop = loop_stack;
2739 if (whichloop == 0)
2740 return 0;
2741 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2742 NULL_RTX);
2743 return 1;
2746 /* Generate a jump to exit the current loop. If not currently inside a loop,
2747 return 0 and do nothing; caller will print an error message. */
2749 int
2750 expand_exit_loop (struct nesting *whichloop)
2752 clear_last_expr ();
2753 if (whichloop == 0)
2754 whichloop = loop_stack;
2755 if (whichloop == 0)
2756 return 0;
2757 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2758 return 1;
2761 /* Generate a conditional jump to exit the current loop if COND
2762 evaluates to zero. If not currently inside a loop,
2763 return 0 and do nothing; caller will print an error message. */
2765 int
2766 expand_exit_loop_if_false (struct nesting *whichloop, tree cond)
2768 rtx label;
2769 clear_last_expr ();
2771 if (whichloop == 0)
2772 whichloop = loop_stack;
2773 if (whichloop == 0)
2774 return 0;
2776 if (integer_nonzerop (cond))
2777 return 1;
2778 if (integer_zerop (cond))
2779 return expand_exit_loop (whichloop);
2781 /* Check if we definitely won't need a fixup. */
2782 if (whichloop == nesting_stack)
2784 jumpifnot (cond, whichloop->data.loop.end_label);
2785 return 1;
2788 /* In order to handle fixups, we actually create a conditional jump
2789 around an unconditional branch to exit the loop. If fixups are
2790 necessary, they go before the unconditional branch. */
2792 label = gen_label_rtx ();
2793 jumpif (cond, label);
2794 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2795 NULL_RTX);
2796 emit_label (label);
2798 return 1;
2801 /* Like expand_exit_loop_if_false except also emit a note marking
2802 the end of the conditional. Should only be used immediately
2803 after expand_start_loop. */
2805 int
2806 expand_exit_loop_top_cond (struct nesting *whichloop, tree cond)
2808 if (! expand_exit_loop_if_false (whichloop, cond))
2809 return 0;
2811 emit_note (NOTE_INSN_LOOP_END_TOP_COND);
2812 return 1;
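/* Taken together, a front end expands `while (cond) body' roughly as
   (illustrative):

     expand_start_loop (1);
     expand_exit_loop_top_cond (0, cond);
     ... expand body ...
     expand_end_loop ();

   The NOTE_INSN_LOOP_END_TOP_COND emitted here is what lets
   expand_end_loop recognize the entry test and rotate it to the
   bottom of the loop.  */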
2815 /* Return nonzero if we should preserve sub-expressions as separate
2816 pseudos. We never do so if we aren't optimizing. We always do so
2817 if -fexpensive-optimizations.
2819 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2820 the loop may still be a small one. */
2822 int
2823 preserve_subexpressions_p (void)
2825 rtx insn;
2827 if (flag_expensive_optimizations)
2828 return 1;
2830 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2831 return 0;
2833 insn = get_last_insn_anywhere ();
2835 return (insn
2836 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2837 < n_non_fixed_regs * 3));
2841 /* Generate a jump to exit the current loop, conditional, binding contour
2842 or case statement. Not all such constructs are visible to this function,
2843 only those started with EXIT_FLAG nonzero. Individual languages use
2844 the EXIT_FLAG parameter to control which kinds of constructs you can
2845 exit this way.
2847 If not currently inside anything that can be exited,
2848 return 0 and do nothing; caller will print an error message. */
2850 int
2851 expand_exit_something (void)
2853 struct nesting *n;
2854 clear_last_expr ();
2855 for (n = nesting_stack; n; n = n->all)
2856 if (n->exit_label != 0)
2858 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2859 return 1;
2862 return 0;
2865 /* Generate RTL to return from the current function, with no value.
2866 (That is, we do not do anything about returning any value.) */
2868 void
2869 expand_null_return (void)
2871 rtx last_insn;
2873 last_insn = get_last_insn ();
2875 /* If this function was declared to return a value, but we
2876 didn't, clobber the return registers so that they are not
2877 propagated live to the rest of the function. */
2878 clobber_return_register ();
2880 expand_null_return_1 (last_insn);
2883 /* Try to guess whether the value of return means error code. */
2884 static enum br_predictor
2885 return_prediction (rtx val)
2887 /* Different heuristics for pointers and scalars. */
2888 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2890 /* NULL is usually not returned. */
2891 if (val == const0_rtx)
2892 return PRED_NULL_RETURN;
2894 else
2896 /* Negative return values are often used to indicate
2897 errors. */
2898 if (GET_CODE (val) == CONST_INT
2899 && INTVAL (val) < 0)
2900 return PRED_NEGATIVE_RETURN;
2901 /* Constant return values also usually indicate errors, but
2902 zero/one often mean booleans, so exclude them from the
2903 heuristics. */
2904 if (CONSTANT_P (val)
2905 && (val != const0_rtx && val != const1_rtx))
2906 return PRED_CONST_RETURN;
2908 return PRED_NO_PREDICTION;
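/* Illustrative returns and the predictions chosen above:

     return NULL;   ->  PRED_NULL_RETURN (pointer case)
     return -1;     ->  PRED_NEGATIVE_RETURN
     return 42;     ->  PRED_CONST_RETURN
     return 1;      ->  PRED_NO_PREDICTION (0/1 smell like booleans)  */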
2911 /* Generate RTL to return from the current function, with value VAL. */
2913 static void
2914 expand_value_return (rtx val)
2916 rtx last_insn;
2917 rtx return_reg;
2918 enum br_predictor pred;
2920 if (flag_guess_branch_prob
2921 && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
2923 /* Emit information for branch prediction. */
2924 rtx note;
2926 note = emit_note (NOTE_INSN_PREDICTION);
2928 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
2932 last_insn = get_last_insn ();
2933 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2935 /* Copy the value to the return location
2936 unless it's already there. */
2938 if (return_reg != val)
2940 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2941 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
2943 int unsignedp = TREE_UNSIGNED (type);
2944 enum machine_mode old_mode
2945 = DECL_MODE (DECL_RESULT (current_function_decl));
2946 enum machine_mode mode
2947 = promote_mode (type, old_mode, &unsignedp, 1);
2949 if (mode != old_mode)
2950 val = convert_modes (mode, old_mode, val, unsignedp);
2952 if (GET_CODE (return_reg) == PARALLEL)
2953 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
2954 else
2955 emit_move_insn (return_reg, val);
2958 expand_null_return_1 (last_insn);
2961 /* Output a return with no value. If LAST_INSN is nonzero,
2962 pretend that the return takes place after LAST_INSN. */
2964 static void
2965 expand_null_return_1 (rtx last_insn)
2967 rtx end_label = cleanup_label ? cleanup_label : return_label;
2969 clear_pending_stack_adjust ();
2970 do_pending_stack_adjust ();
2971 clear_last_expr ();
2973 if (end_label == 0)
2974 end_label = return_label = gen_label_rtx ();
2975 expand_goto_internal (NULL_TREE, end_label, last_insn);
2978 /* Generate RTL to evaluate the expression RETVAL and return it
2979 from the current function. */
2981 void
2982 expand_return (tree retval)
2984 /* If there are any cleanups to be performed, then they will
2985 be inserted following LAST_INSN. It is desirable
2986 that the last_insn, for such purposes, should be the
2987 last insn before computing the return value. Otherwise, cleanups
2988 which call functions can clobber the return value. */
2989 /* ??? rms: I think that is erroneous, because in C++ it would
2990 run destructors on variables that might be used in the subsequent
2991 computation of the return value. */
2992 rtx last_insn = 0;
2993 rtx result_rtl;
2994 rtx val = 0;
2995 tree retval_rhs;
2997 /* If function wants no value, give it none. */
2998 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3000 expand_expr (retval, NULL_RTX, VOIDmode, 0);
3001 emit_queue ();
3002 expand_null_return ();
3003 return;
3006 if (retval == error_mark_node)
3008 /* Treat this like a return of no value from a function that
3009 returns a value. */
3010 expand_null_return ();
3011 return;
3013 else if (TREE_CODE (retval) == RESULT_DECL)
3014 retval_rhs = retval;
3015 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
3016 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3017 retval_rhs = TREE_OPERAND (retval, 1);
3018 else if (VOID_TYPE_P (TREE_TYPE (retval)))
3019 /* Recognize tail-recursive call to void function. */
3020 retval_rhs = retval;
3021 else
3022 retval_rhs = NULL_TREE;
3024 last_insn = get_last_insn ();
3026 /* Distribute return down conditional expr if either of the sides
3027 may involve tail recursion (see test below). This enhances the number
3028 of tail recursions we see. Don't do this always since it can produce
3029 sub-optimal code in some cases and we distribute assignments into
3030 conditional expressions when it would help. */
3032 if (optimize && retval_rhs != 0
3033 && frame_offset == 0
3034 && TREE_CODE (retval_rhs) == COND_EXPR
3035 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
3036 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
3038 rtx label = gen_label_rtx ();
3039 tree expr;
3041 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
3042 start_cleanup_deferral ();
3043 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3044 DECL_RESULT (current_function_decl),
3045 TREE_OPERAND (retval_rhs, 1));
3046 TREE_SIDE_EFFECTS (expr) = 1;
3047 expand_return (expr);
3048 emit_label (label);
3050 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3051 DECL_RESULT (current_function_decl),
3052 TREE_OPERAND (retval_rhs, 2));
3053 TREE_SIDE_EFFECTS (expr) = 1;
3054 expand_return (expr);
3055 end_cleanup_deferral ();
3056 return;
3059 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3061 /* If the result is an aggregate that is being returned in one (or more)
3062 registers, load the registers here. The compiler currently can't handle
3063 copying a BLKmode value into registers. We could put this code in a
3064 more general area (for use by everyone instead of just function
3065 call/return), but until this feature is generally usable it is kept here
3066 (and in expand_call). The value must go into a pseudo in case there
3067 are cleanups that will clobber the real return register. */
3069 if (retval_rhs != 0
3070 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3071 && GET_CODE (result_rtl) == REG)
3073 int i;
3074 unsigned HOST_WIDE_INT bitpos, xbitpos;
3075 unsigned HOST_WIDE_INT big_endian_correction = 0;
3076 unsigned HOST_WIDE_INT bytes
3077 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3078 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3079 unsigned int bitsize
3080 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3081 rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
3082 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3083 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3084 enum machine_mode tmpmode, result_reg_mode;
3086 if (bytes == 0)
3088 expand_null_return ();
3089 return;
3092 /* Structures whose size is not a multiple of a word are aligned
3093 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
3094 machine, this means we must skip the empty high order bytes when
3095 calculating the bit offset. */
3096 if (BYTES_BIG_ENDIAN
3097 && bytes % UNITS_PER_WORD)
3098 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3099 * BITS_PER_UNIT));
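/* Worked example (hypothetical 32-bit big-endian target): returning
   a 6-byte structure gives bytes % UNITS_PER_WORD == 2, so
   big_endian_correction = 32 - 2 * 8 = 16; the first 16 destination
   bits are left as padding and the 48 data bits end up
   right-justified in the pair of return words.  */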
3101 /* Copy the structure BITSIZE bits at a time. */
3102 for (bitpos = 0, xbitpos = big_endian_correction;
3103 bitpos < bytes * BITS_PER_UNIT;
3104 bitpos += bitsize, xbitpos += bitsize)
3106 /* We need a new destination pseudo each time xbitpos is
3107 on a word boundary and when xbitpos == big_endian_correction
3108 (the first time through). */
3109 if (xbitpos % BITS_PER_WORD == 0
3110 || xbitpos == big_endian_correction)
3112 /* Generate an appropriate register. */
3113 dst = gen_reg_rtx (word_mode);
3114 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3116 /* Clear the destination before we move anything into it. */
3117 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
3120 /* We need a new source operand each time bitpos is on a word
3121 boundary. */
3122 if (bitpos % BITS_PER_WORD == 0)
3123 src = operand_subword_force (result_val,
3124 bitpos / BITS_PER_WORD,
3125 BLKmode);
3127 /* Use bitpos for the source extraction (left justified) and
3128 xbitpos for the destination store (right justified). */
3129 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3130 extract_bit_field (src, bitsize,
3131 bitpos % BITS_PER_WORD, 1,
3132 NULL_RTX, word_mode, word_mode,
3133 BITS_PER_WORD),
3134 BITS_PER_WORD);
3137 /* Find the smallest integer mode large enough to hold the
3138 entire structure and use that mode instead of BLKmode
3139 on the USE insn for the return register. */
3140 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3141 tmpmode != VOIDmode;
3142 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3143 /* Have we found a large enough mode? */
3144 if (GET_MODE_SIZE (tmpmode) >= bytes)
3145 break;
3147 /* No suitable mode found. */
3148 if (tmpmode == VOIDmode)
3149 abort ();
3151 PUT_MODE (result_rtl, tmpmode);
3153 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3154 result_reg_mode = word_mode;
3155 else
3156 result_reg_mode = tmpmode;
3157 result_reg = gen_reg_rtx (result_reg_mode);
3159 emit_queue ();
3160 for (i = 0; i < n_regs; i++)
3161 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3162 result_pseudos[i]);
3164 if (tmpmode != result_reg_mode)
3165 result_reg = gen_lowpart (tmpmode, result_reg);
3167 expand_value_return (result_reg);
3169 else if (retval_rhs != 0
3170 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3171 && (GET_CODE (result_rtl) == REG
3172 || (GET_CODE (result_rtl) == PARALLEL)))
3174 /* Calculate the return value into a temporary (usually a pseudo
3175 reg). */
3176 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3177 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3179 val = assign_temp (nt, 0, 0, 1);
3180 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3181 val = force_not_mem (val);
3182 emit_queue ();
3183 /* Return the calculated value, doing cleanups first. */
3184 expand_value_return (val);
3186 else
3188 /* No cleanups or no hard reg used;
3189 calculate value into hard return reg. */
3190 expand_expr (retval, const0_rtx, VOIDmode, 0);
3191 emit_queue ();
3192 expand_value_return (result_rtl);
3196 /* Attempt to optimize a potential tail recursion call into a goto.
3197 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3198 where to place the jump to the tail recursion label.
3200 Return TRUE if the call was optimized into a goto. */
3202 int
3203 optimize_tail_recursion (tree arguments, rtx last_insn)
3205 /* Finish checking validity, and if valid emit code to set the
3206 argument variables for the new call. */
3207 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3209 if (tail_recursion_label == 0)
3211 tail_recursion_label = gen_label_rtx ();
3212 emit_label_after (tail_recursion_label,
3213 tail_recursion_reentry);
3215 emit_queue ();
3216 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3217 emit_barrier ();
3218 return 1;
3220 return 0;
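/* Illustrative source this can turn into a loop:

     int sum (int n, int acc)
     {
       if (n == 0)
         return acc;
       return sum (n - 1, acc + n);
     }

   For the recursive call, tail_recursion_args below stores `n - 1'
   and `acc + n' into the formals, and the call itself is replaced by
   a jump to tail_recursion_label at the top of the function.  */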
3223 /* Emit code to alter this function's formal parms for a tail-recursive call.
3224 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3225 FORMALS is the chain of decls of formals.
3226 Return 1 if this can be done;
3227 otherwise return 0 and do not emit any code. */
3229 static int
3230 tail_recursion_args (tree actuals, tree formals)
3232 tree a = actuals, f = formals;
3233 int i;
3234 rtx *argvec;
3236 /* Check that number and types of actuals are compatible
3237 with the formals. This is not always true in valid C code.
3238 Also check that no formal needs to be addressable
3239 and that all formals are scalars. */
3241 /* Also count the args. */
3243 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3245 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3246 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3247 return 0;
3248 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3249 return 0;
3251 if (a != 0 || f != 0)
3252 return 0;
3254 /* Compute all the actuals. */
3256 argvec = alloca (i * sizeof (rtx));
3258 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3259 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3261 /* Find which actual values refer to current values of previous formals.
3262 Copy each of them now, before any formal is changed. */
3264 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3266 int copy = 0;
3267 int j;
3268 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3269 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3271 copy = 1;
3272 break;
3274 if (copy)
3275 argvec[i] = copy_to_reg (argvec[i]);
3278 /* Store the values of the actuals into the formals. */
3280 for (f = formals, a = actuals, i = 0; f;
3281 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3283 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3284 emit_move_insn (DECL_RTL (f), argvec[i]);
3285 else
3287 rtx tmp = argvec[i];
3288 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a)));
3289 promote_mode (TREE_TYPE (TREE_VALUE (a)), GET_MODE (tmp),
3290 &unsignedp, 0);
3291 if (DECL_MODE (f) != GET_MODE (DECL_RTL (f)))
3293 tmp = gen_reg_rtx (DECL_MODE (f));
3294 convert_move (tmp, argvec[i], unsignedp);
3296 convert_move (DECL_RTL (f), tmp, unsignedp);
3300 free_temp_slots ();
3301 return 1;
3304 /* Generate the RTL code for entering a binding contour.
3305 The variables are declared one by one, by calls to `expand_decl'.
3307 FLAGS is a bitwise or of the following flags:
3309 1 - Nonzero if this construct should be visible to
3310 `exit_something'.
3312 2 - Nonzero if this contour does not require a
3313 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3314 language-independent code should set this flag because they
3315 will not create corresponding BLOCK nodes. (There should be
3316 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3317 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3318 when expand_end_bindings is called.
3320 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3321 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3322 note. */
3324 void
3325 expand_start_bindings_and_block (int flags, tree block)
3327 struct nesting *thisblock = ALLOC_NESTING ();
3328 rtx note;
3329 int exit_flag = ((flags & 1) != 0);
3330 int block_flag = ((flags & 2) == 0);
3332 /* If a BLOCK is supplied, then the caller should be requesting a
3333 NOTE_INSN_BLOCK_BEG note. */
3334 if (!block_flag && block)
3335 abort ();
3337 /* Create a note to mark the beginning of the block. */
3338 if (block_flag)
3340 note = emit_note (NOTE_INSN_BLOCK_BEG);
3341 NOTE_BLOCK (note) = block;
3343 else
3344 note = emit_note (NOTE_INSN_DELETED);
3346 /* Make an entry on block_stack for the block we are entering. */
3348 thisblock->desc = BLOCK_NESTING;
3349 thisblock->next = block_stack;
3350 thisblock->all = nesting_stack;
3351 thisblock->depth = ++nesting_depth;
3352 thisblock->data.block.stack_level = 0;
3353 thisblock->data.block.cleanups = 0;
3354 thisblock->data.block.exception_region = 0;
3355 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3357 thisblock->data.block.conditional_code = 0;
3358 thisblock->data.block.last_unconditional_cleanup = note;
3359 /* When we insert instructions after the last unconditional cleanup,
3360 we don't adjust last_insn. That means that a later add_insn will
3361 clobber the instructions we've just added. The easiest way to
3362 fix this is to just insert another instruction here, so that the
3363 instructions inserted after the last unconditional cleanup are
3364 never the last instruction. */
3365 emit_note (NOTE_INSN_DELETED);
3367 if (block_stack
3368 && !(block_stack->data.block.cleanups == NULL_TREE
3369 && block_stack->data.block.outer_cleanups == NULL_TREE))
3370 thisblock->data.block.outer_cleanups
3371 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3372 block_stack->data.block.outer_cleanups);
3373 else
3374 thisblock->data.block.outer_cleanups = 0;
3375 thisblock->data.block.label_chain = 0;
3376 thisblock->data.block.innermost_stack_block = stack_block_stack;
3377 thisblock->data.block.first_insn = note;
3378 thisblock->data.block.block_start_count = ++current_block_start_count;
3379 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3380 block_stack = thisblock;
3381 nesting_stack = thisblock;
3383 /* Make a new level for allocating stack slots. */
3384 push_temp_slots ();
3387 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3388 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3389 expand_expr are made. After we end the region, we know that all
3390 space for all temporaries that were created by TARGET_EXPRs will be
3391 destroyed and their space freed for reuse. */
3393 void
3394 expand_start_target_temps (void)
3396 /* This is so that even if the result is preserved, the space
3397 allocated will be freed, as we know that it is no longer in use. */
3398 push_temp_slots ();
3400 /* Start a new binding layer that will keep track of all cleanup
3401 actions to be performed. */
3402 expand_start_bindings (2);
3404 target_temp_slot_level = temp_slot_level;
3407 void
3408 expand_end_target_temps (void)
3410 expand_end_bindings (NULL_TREE, 0, 0);
3412 /* This is so that even if the result is preserved, the space
3413 allocated will be freed, as we know that it is no longer in use. */
3414 pop_temp_slots ();
3417 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
3418 in question represents the outermost pair of curly braces (i.e. the "body
3419 block") of a function or method.
3421 For any BLOCK node representing a "body block" of a function or method, the
3422 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3423 represents the outermost (function) scope for the function or method (i.e.
3424 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3425 *that* node in turn will point to the relevant FUNCTION_DECL node. */
3427 int
3428 is_body_block (tree stmt)
3430 if (lang_hooks.no_body_blocks)
3431 return 0;
3433 if (TREE_CODE (stmt) == BLOCK)
3435 tree parent = BLOCK_SUPERCONTEXT (stmt);
3437 if (parent && TREE_CODE (parent) == BLOCK)
3439 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3441 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3442 return 1;
3446 return 0;
3449 /* True if we are currently emitting insns in an area of output code
3450 that is controlled by a conditional expression. This is used by
3451 the cleanup handling code to generate conditional cleanup actions. */
3453 int
3454 conditional_context (void)
3456 return block_stack && block_stack->data.block.conditional_code;
3459 /* Return an opaque pointer to the current nesting level, so frontend code
3460 can check its own sanity. */
3462 struct nesting *
3463 current_nesting_level (void)
3465 return cfun ? block_stack : 0;
3468 /* Emit a handler label for a nonlocal goto handler.
3469 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3471 static rtx
3472 expand_nl_handler_label (rtx slot, rtx before_insn)
3474 rtx insns;
3475 rtx handler_label = gen_label_rtx ();
3477 /* Don't let cleanup_cfg delete the handler. */
3478 LABEL_PRESERVE_P (handler_label) = 1;
3480 start_sequence ();
3481 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3482 insns = get_insns ();
3483 end_sequence ();
3484 emit_insn_before (insns, before_insn);
3486 emit_label (handler_label);
3488 return handler_label;
3491 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3492 handler. */
3493 static void
3494 expand_nl_goto_receiver (void)
3496 #ifdef HAVE_nonlocal_goto
3497 if (! HAVE_nonlocal_goto)
3498 #endif
3499 /* First adjust our frame pointer to its actual value. It was
3500 previously set to the start of the virtual area corresponding to
3501 the stacked variables when we branched here and now needs to be
3502 adjusted to the actual hardware fp value.
3504 Assignments to virtual registers are converted by
3505 instantiate_virtual_regs into the corresponding assignment
3506 to the underlying register (fp in this case) that makes
3507 the original assignment true.
3508 So the following insn will actually be
3509 decrementing fp by STARTING_FRAME_OFFSET. */
3510 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3512 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3513 if (fixed_regs[ARG_POINTER_REGNUM])
3515 #ifdef ELIMINABLE_REGS
3516 /* If the argument pointer can be eliminated in favor of the
3517 frame pointer, we don't need to restore it. We assume here
3518 that if such an elimination is present, it can always be used.
3519 This is the case on all known machines; if we don't make this
3520 assumption, we do unnecessary saving on many machines. */
3521 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3522 size_t i;
3524 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3525 if (elim_regs[i].from == ARG_POINTER_REGNUM
3526 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3527 break;
3529 if (i == ARRAY_SIZE (elim_regs))
3530 #endif
3532 /* Now restore our arg pointer from the address at which it
3533 was saved in our stack frame. */
3534 emit_move_insn (virtual_incoming_args_rtx,
3535 copy_to_reg (get_arg_pointer_save_area (cfun)));
3538 #endif
3540 #ifdef HAVE_nonlocal_goto_receiver
3541 if (HAVE_nonlocal_goto_receiver)
3542 emit_insn (gen_nonlocal_goto_receiver ());
3543 #endif
3546 /* Make handlers for nonlocal gotos taking place in the function calls in
3547 block THISBLOCK. */
3549 static void
3550 expand_nl_goto_receivers (struct nesting *thisblock)
3552 tree link;
3553 rtx afterward = gen_label_rtx ();
3554 rtx insns, slot;
3555 rtx label_list;
3556 int any_invalid;
3558 /* Record the handler address in the stack slot for that purpose,
3559 during this block, saving and restoring the outer value. */
3560 if (thisblock->next != 0)
3561 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3563 rtx save_receiver = gen_reg_rtx (Pmode);
3564 emit_move_insn (XEXP (slot, 0), save_receiver);
3566 start_sequence ();
3567 emit_move_insn (save_receiver, XEXP (slot, 0));
3568 insns = get_insns ();
3569 end_sequence ();
3570 emit_insn_before (insns, thisblock->data.block.first_insn);
3573 /* Jump around the handlers; they run only when specially invoked. */
3574 emit_jump (afterward);
3576 /* Make a separate handler for each label. */
3577 link = nonlocal_labels;
3578 slot = nonlocal_goto_handler_slots;
3579 label_list = NULL_RTX;
3580 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3581 /* Skip any labels we shouldn't be able to jump to from here,
3582 we generate one special handler for all of them below which just calls
3583 abort. */
3584 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3586 rtx lab;
3587 lab = expand_nl_handler_label (XEXP (slot, 0),
3588 thisblock->data.block.first_insn);
3589 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3591 expand_nl_goto_receiver ();
3593 /* Jump to the "real" nonlocal label. */
3594 expand_goto (TREE_VALUE (link));
3597 /* A second pass over all nonlocal labels; this time we handle those
3598 we should not be able to jump to at this point. */
3599 link = nonlocal_labels;
3600 slot = nonlocal_goto_handler_slots;
3601 any_invalid = 0;
3602 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3603 if (DECL_TOO_LATE (TREE_VALUE (link)))
3605 rtx lab;
3606 lab = expand_nl_handler_label (XEXP (slot, 0),
3607 thisblock->data.block.first_insn);
3608 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3609 any_invalid = 1;
3612 if (any_invalid)
3614 expand_nl_goto_receiver ();
3615 expand_builtin_trap ();
3618 nonlocal_goto_handler_labels = label_list;
3619 emit_label (afterward);
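/* For illustration only (a sketch of the GNU C source construct these
   receivers support, not code from this file):

     int f (void)
     {
       __label__ failed;
       int g (int i) { if (i < 0) goto failed; return i; }

       g (-1);            -- control reenters f through a receiver
       return 0;
     failed:
       return 1;
     }

   Each receiver restores the frame pointer (and, if needed, the arg
   pointer) before control reaches the label `failed'.  */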
3622 /* Warn about any unused VARS (which may contain nodes other than
3623 VAR_DECLs, but such nodes are ignored). The nodes are connected
3624 via the TREE_CHAIN field. */
3626 void
3627 warn_about_unused_variables (tree vars)
3629 tree decl;
3631 if (warn_unused_variable)
3632 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3633 if (TREE_CODE (decl) == VAR_DECL
3634 && ! TREE_USED (decl)
3635 && ! DECL_IN_SYSTEM_HEADER (decl)
3636 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3637 warning ("%Hunused variable '%D'", &DECL_SOURCE_LOCATION (decl), decl);
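/* For example, with -Wunused-variable the DECL chain for

     void f (void) { int unused_var; }

   reaches this function and produces a warning like
   "unused variable `unused_var'".  */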
3640 /* Generate RTL code to terminate a binding contour.
3642 VARS is the chain of VAR_DECL nodes for the variables bound in this
3643 contour. There may actually be other nodes in this chain, but any
3644 nodes other than VAR_DECLS are ignored.
3646 MARK_ENDS is nonzero if we should put a note at the beginning
3647 and end of this binding contour.
3649 DONT_JUMP_IN is positive if it is not valid to jump into this contour,
3650 zero if we can jump into this contour only if it does not have a saved
3651 stack level, and negative if we are not to check for invalid use of
3652 labels (because the front end does that). */
3654 void
3655 expand_end_bindings (tree vars, int mark_ends, int dont_jump_in)
3657 struct nesting *thisblock = block_stack;
3659 /* If any of the variables in this scope were not used, warn the
3660 user. */
3661 warn_about_unused_variables (vars);
3663 if (thisblock->exit_label)
3665 do_pending_stack_adjust ();
3666 emit_label (thisblock->exit_label);
3669 /* If necessary, make handlers for nonlocal gotos taking
3670 place in the function calls in this block. */
3671 if (function_call_count != 0 && nonlocal_labels
3672 /* Make handler for outermost block
3673 if there were any nonlocal gotos to this function. */
3674 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3675 /* Make handler for inner block if it has something
3676 special to do when you jump out of it. */
3677 : (thisblock->data.block.cleanups != 0
3678 || thisblock->data.block.stack_level != 0)))
3679 expand_nl_goto_receivers (thisblock);
3681 /* Don't allow jumping into a block that has a stack level.
3682 Cleanups are allowed, though. */
3683 if (dont_jump_in > 0
3684 || (dont_jump_in == 0 && thisblock->data.block.stack_level != 0))
3686 struct label_chain *chain;
3688 /* Any labels in this block are no longer valid to go to.
3689 Mark them to cause an error message. */
3690 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3692 DECL_TOO_LATE (chain->label) = 1;
3693 /* If any goto without a fixup came to this label,
3694 that must be an error, because gotos without fixups
3695 come from outside all saved stack-levels. */
3696 if (TREE_ADDRESSABLE (chain->label))
3697 error ("%Hlabel '%D' used before containing binding contour",
3698 &DECL_SOURCE_LOCATION (chain->label), chain->label);
3702 /* Restore stack level in effect before the block
3703 (only if variable-size objects allocated). */
3704 /* Perform any cleanups associated with the block. */
3706 if (thisblock->data.block.stack_level != 0
3707 || thisblock->data.block.cleanups != 0)
3709 int reachable;
3710 rtx insn;
3712 /* Don't let cleanups affect ({...}) constructs. */
3713 int old_expr_stmts_for_value = expr_stmts_for_value;
3714 rtx old_last_expr_value = last_expr_value;
3715 tree old_last_expr_type = last_expr_type;
3716 expr_stmts_for_value = 0;
3718 /* Only clean up here if this point can actually be reached. */
3719 insn = get_last_insn ();
3720 if (GET_CODE (insn) == NOTE)
3721 insn = prev_nonnote_insn (insn);
3722 reachable = (! insn || GET_CODE (insn) != BARRIER);
3724 /* Do the cleanups. */
3725 expand_cleanups (thisblock->data.block.cleanups, 0, reachable);
3726 if (reachable)
3727 do_pending_stack_adjust ();
3729 expr_stmts_for_value = old_expr_stmts_for_value;
3730 last_expr_value = old_last_expr_value;
3731 last_expr_type = old_last_expr_type;
3733 /* Restore the stack level. */
3735 if (reachable && thisblock->data.block.stack_level != 0)
3737 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3738 thisblock->data.block.stack_level, NULL_RTX);
3739 if (nonlocal_goto_handler_slots != 0)
3740 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3741 NULL_RTX);
3744 /* Any gotos out of this block must also do these things.
3745 Also report any gotos with fixups that came to labels in this
3746 level. */
3747 fixup_gotos (thisblock,
3748 thisblock->data.block.stack_level,
3749 thisblock->data.block.cleanups,
3750 thisblock->data.block.first_insn,
3751 dont_jump_in);
3754 /* Mark the beginning and end of the scope if requested.
3755 We do this now, after running cleanups on the variables
3756 just going out of scope, so they are in scope for their cleanups. */
3758 if (mark_ends)
3760 rtx note = emit_note (NOTE_INSN_BLOCK_END);
3761 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3763 else
3764 /* Get rid of the beginning-mark if we don't make an end-mark. */
3765 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3767 /* Restore the temporary level of TARGET_EXPRs. */
3768 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3770 /* Restore block_stack level for containing block. */
3772 stack_block_stack = thisblock->data.block.innermost_stack_block;
3773 POPSTACK (block_stack);
3775 /* Pop the stack slot nesting and free any slots at this level. */
3776 pop_temp_slots ();
3779 /* Generate code to save the stack pointer at the start of the current block
3780 and set up to restore it on exit. */
3782 void
3783 save_stack_pointer (void)
3785 struct nesting *thisblock = block_stack;
3787 if (thisblock->data.block.stack_level == 0)
3789 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3790 &thisblock->data.block.stack_level,
3791 thisblock->data.block.first_insn);
3792 stack_block_stack = thisblock;
3796 /* Generate RTL for the automatic variable declaration DECL.
3797 (Other kinds of declarations are simply ignored if seen here.) */
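/* For illustration, given C source such as

     void f (int n)
     {
       int i;             -- may get a pseudo register
       char buf[32];      -- fixed-size stack slot via assign_temp
       int vla[n];        -- dynamic allocation via
                             allocate_dynamic_stack_space
     }

   each automatic variable above is expanded by one of the cases below
   (a sketch; the exact choice also depends on flags such as
   -ffloat-store and on TREE_ADDRESSABLE).  */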
3799 void
3800 expand_decl (tree decl)
3802 tree type;
3804 type = TREE_TYPE (decl);
3806 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3807 type in case this node is used in a reference. */
3808 if (TREE_CODE (decl) == CONST_DECL)
3810 DECL_MODE (decl) = TYPE_MODE (type);
3811 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3812 DECL_SIZE (decl) = TYPE_SIZE (type);
3813 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3814 return;
3817 /* Otherwise, only automatic variables need any expansion done. Static and
3818 external variables, and external functions, will be handled by
3819 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3820 nothing. PARM_DECLs are handled in `assign_parms'. */
3821 if (TREE_CODE (decl) != VAR_DECL)
3822 return;
3824 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3825 return;
3827 /* Create the RTL representation for the variable. */
3829 if (type == error_mark_node)
3830 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3832 else if (DECL_SIZE (decl) == 0)
3833 /* Variable with incomplete type. */
3835 rtx x;
3836 if (DECL_INITIAL (decl) == 0)
3837 /* Error message was already done; now avoid a crash. */
3838 x = gen_rtx_MEM (BLKmode, const0_rtx);
3839 else
3840 /* An initializer is going to decide the size of this array.
3841 Until we know the size, represent its address with a reg. */
3842 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3844 set_mem_attributes (x, decl, 1);
3845 SET_DECL_RTL (decl, x);
3847 else if (DECL_MODE (decl) != BLKmode
3848 /* If -ffloat-store, don't put explicit float vars
3849 into regs. */
3850 && !(flag_float_store
3851 && TREE_CODE (type) == REAL_TYPE)
3852 && ! TREE_THIS_VOLATILE (decl)
3853 && ! DECL_NONLOCAL (decl)
3854 && (DECL_REGISTER (decl) || DECL_ARTIFICIAL (decl) || optimize))
3856 /* Automatic variable that can go in a register. */
3857 int unsignedp = TREE_UNSIGNED (type);
3858 enum machine_mode reg_mode
3859 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3861 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3863 if (!DECL_ARTIFICIAL (decl))
3864 mark_user_reg (DECL_RTL (decl));
3866 if (POINTER_TYPE_P (type))
3867 mark_reg_pointer (DECL_RTL (decl),
3868 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3870 maybe_set_unchanging (DECL_RTL (decl), decl);
3872 /* If something wants our address, try to use ADDRESSOF. */
3873 if (TREE_ADDRESSABLE (decl))
3874 put_var_into_stack (decl, /*rescan=*/false);
3877 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3878 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3879 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3880 STACK_CHECK_MAX_VAR_SIZE)))
3882 /* Variable of fixed size that goes on the stack. */
3883 rtx oldaddr = 0;
3884 rtx addr;
3885 rtx x;
3887 /* If we previously made RTL for this decl, it must be an array
3888 whose size was determined by the initializer.
3889 The old address was a register; set that register now
3890 to the proper address. */
3891 if (DECL_RTL_SET_P (decl))
3893 if (GET_CODE (DECL_RTL (decl)) != MEM
3894 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3895 abort ();
3896 oldaddr = XEXP (DECL_RTL (decl), 0);
3899 /* Set alignment we actually gave this decl. */
3900 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3901 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3902 DECL_USER_ALIGN (decl) = 0;
3904 x = assign_temp (decl, 1, 1, 1);
3905 set_mem_attributes (x, decl, 1);
3906 SET_DECL_RTL (decl, x);
3908 if (oldaddr)
3910 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3911 if (addr != oldaddr)
3912 emit_move_insn (oldaddr, addr);
3915 else
3916 /* Dynamic-size object: must push space on the stack. */
3918 rtx address, size, x;
3920 /* Record the stack pointer on entry to block, if we have
3921 not already done so. */
3922 do_pending_stack_adjust ();
3923 save_stack_pointer ();
3925 /* In function-at-a-time mode, variable_size doesn't expand this,
3926 so do it now. */
3927 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3928 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3929 const0_rtx, VOIDmode, 0);
3931 /* Compute the variable's size, in bytes. */
3932 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
3933 free_temp_slots ();
3935 /* Allocate space on the stack for the variable. Note that
3936 DECL_ALIGN says how the variable is to be aligned and we
3937 cannot use it to conclude anything about the alignment of
3938 the size. */
3939 address = allocate_dynamic_stack_space (size, NULL_RTX,
3940 TYPE_ALIGN (TREE_TYPE (decl)));
3942 /* Reference the variable indirectly through that rtx. */
3943 x = gen_rtx_MEM (DECL_MODE (decl), address);
3944 set_mem_attributes (x, decl, 1);
3945 SET_DECL_RTL (decl, x);
3948 /* Indicate the alignment we actually gave this variable. */
3949 #ifdef STACK_BOUNDARY
3950 DECL_ALIGN (decl) = STACK_BOUNDARY;
3951 #else
3952 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3953 #endif
3954 DECL_USER_ALIGN (decl) = 0;
3958 /* Emit code to perform the initialization of a declaration DECL. */
3960 void
3961 expand_decl_init (tree decl)
3963 int was_used = TREE_USED (decl);
3965 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
3966 for static decls. */
3967 if (TREE_CODE (decl) == CONST_DECL
3968 || TREE_STATIC (decl))
3969 return;
3971 /* Compute and store the initial value now. */
3973 push_temp_slots ();
3975 if (DECL_INITIAL (decl) == error_mark_node)
3977 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3979 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3980 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3981 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node), 0);
3983 emit_queue ();
3985 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3987 emit_line_note (DECL_SOURCE_LOCATION (decl));
3988 expand_assignment (decl, DECL_INITIAL (decl), 0);
3989 emit_queue ();
3992 /* Don't let the initialization count as "using" the variable. */
3993 TREE_USED (decl) = was_used;
3995 /* Free any temporaries we made while initializing the decl. */
3996 preserve_temp_slots (NULL_RTX);
3997 free_temp_slots ();
3998 pop_temp_slots ();
4001 /* CLEANUP is an expression to be executed at exit from this binding contour;
4002 for example, in C++, it might call the destructor for this variable.
4004 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4005 CLEANUP multiple times, and have the correct semantics. This
4006 happens in exception handling, and for gotos, returns, and breaks
4007 that leave the current scope.
4009 If CLEANUP is nonzero and DECL is zero, we record a cleanup
4010 that is not associated with any particular variable. */
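/* For illustration (hypothetical front-end usage, not code from this
   file): for a C++ block such as

     { String s; ... }

   the front end registers, roughly,

     expand_decl_cleanup (s_decl, destructor_call_for (s_decl));

   so the destructor runs on every exit from the block, including
   gotos, returns and exceptions.  `destructor_call_for' is a made-up
   name for whatever tree the front end builds.  */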
4013 expand_decl_cleanup (tree decl, tree cleanup)
4015 struct nesting *thisblock;
4017 /* Error if we are not in any block. */
4018 if (cfun == 0 || block_stack == 0)
4019 return 0;
4021 thisblock = block_stack;
4023 /* Record the cleanup if there is one. */
4025 if (cleanup != 0)
4027 tree t;
4028 rtx seq;
4029 tree *cleanups = &thisblock->data.block.cleanups;
4030 int cond_context = conditional_context ();
4032 if (cond_context)
4034 rtx flag = gen_reg_rtx (word_mode);
4035 rtx set_flag_0;
4036 tree cond;
4038 start_sequence ();
4039 emit_move_insn (flag, const0_rtx);
4040 set_flag_0 = get_insns ();
4041 end_sequence ();
4043 thisblock->data.block.last_unconditional_cleanup
4044 = emit_insn_after (set_flag_0,
4045 thisblock->data.block.last_unconditional_cleanup);
4047 emit_move_insn (flag, const1_rtx);
4049 cond = build_decl (VAR_DECL, NULL_TREE,
4050 (*lang_hooks.types.type_for_mode) (word_mode, 1));
4051 SET_DECL_RTL (cond, flag);
4053 /* Conditionalize the cleanup. */
4054 cleanup = build (COND_EXPR, void_type_node,
4055 (*lang_hooks.truthvalue_conversion) (cond),
4056 cleanup, integer_zero_node);
4057 cleanup = fold (cleanup);
4059 cleanups = &thisblock->data.block.cleanups;
4062 cleanup = unsave_expr (cleanup);
4064 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4066 if (! cond_context)
4067 /* If this block has a cleanup, it belongs in stack_block_stack. */
4068 stack_block_stack = thisblock;
4070 if (cond_context)
4072 start_sequence ();
4075 if (! using_eh_for_cleanups_p)
4076 TREE_ADDRESSABLE (t) = 1;
4077 else
4078 expand_eh_region_start ();
4080 if (cond_context)
4082 seq = get_insns ();
4083 end_sequence ();
4084 if (seq)
4085 thisblock->data.block.last_unconditional_cleanup
4086 = emit_insn_after (seq,
4087 thisblock->data.block.last_unconditional_cleanup);
4089 else
4091 thisblock->data.block.last_unconditional_cleanup
4092 = get_last_insn ();
4093 /* When we insert instructions after the last unconditional cleanup,
4094 we don't adjust last_insn. That means that a later add_insn will
4095 clobber the instructions we've just added. The easiest way to
4096 fix this is to just insert another instruction here, so that the
4097 instructions inserted after the last unconditional cleanup are
4098 never the last instruction. */
4099 emit_note (NOTE_INSN_DELETED);
4102 return 1;
4105 /* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
4106 is thrown. */
4109 expand_decl_cleanup_eh (tree decl, tree cleanup, int eh_only)
4111 int ret = expand_decl_cleanup (decl, cleanup);
4112 if (cleanup && ret)
4114 tree node = block_stack->data.block.cleanups;
4115 CLEANUP_EH_ONLY (node) = eh_only;
4117 return ret;
4120 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4121 DECL_ELTS is the list of elements that belong to DECL's type.
4122 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4124 void
4125 expand_anon_union_decl (tree decl, tree cleanup, tree decl_elts)
4127 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4128 rtx x;
4129 tree t;
4131 /* If any of the elements are addressable, so is the entire union. */
4132 for (t = decl_elts; t; t = TREE_CHAIN (t))
4133 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4135 TREE_ADDRESSABLE (decl) = 1;
4136 break;
4139 expand_decl (decl);
4140 expand_decl_cleanup (decl, cleanup);
4141 x = DECL_RTL (decl);
4143 /* Go through the elements, assigning RTL to each. */
4144 for (t = decl_elts; t; t = TREE_CHAIN (t))
4146 tree decl_elt = TREE_VALUE (t);
4147 tree cleanup_elt = TREE_PURPOSE (t);
4148 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4150 /* If any of the elements are used, so is the entire
4151 union. */
4152 if (TREE_USED (decl_elt))
4153 TREE_USED (decl) = 1;
4155 /* Propagate the union's alignment to the elements. */
4156 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4157 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4159 /* If the element has BLKmode and the union doesn't, the union is
4160 aligned such that the element doesn't need to have BLKmode, so
4161 change the element's mode to the appropriate one for its size. */
4162 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4163 DECL_MODE (decl_elt) = mode
4164 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4166 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4167 instead create a new MEM rtx with the proper mode. */
4168 if (GET_CODE (x) == MEM)
4170 if (mode == GET_MODE (x))
4171 SET_DECL_RTL (decl_elt, x);
4172 else
4173 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
4175 else if (GET_CODE (x) == REG)
4177 if (mode == GET_MODE (x))
4178 SET_DECL_RTL (decl_elt, x);
4179 else
4180 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
4182 else
4183 abort ();
4185 /* Record the cleanup if there is one. */
4187 if (cleanup != 0)
4188 thisblock->data.block.cleanups
4189 = tree_cons (decl_elt, cleanup_elt,
4190 thisblock->data.block.cleanups);
4194 /* Expand a list of cleanups LIST.
4195 Elements may be expressions or may be nested lists.
4197 If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
4198 goto and handle protection regions specially in that case.
4200 If REACHABLE is nonzero, we emit code; otherwise we just inform
4201 the exception handling code about this finalization. */
4203 static void
4204 expand_cleanups (tree list, int in_fixup, int reachable)
4206 tree tail;
4207 for (tail = list; tail; tail = TREE_CHAIN (tail))
4208 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4209 expand_cleanups (TREE_VALUE (tail), in_fixup, reachable);
4210 else
4212 if (! in_fixup && using_eh_for_cleanups_p)
4213 expand_eh_region_end_cleanup (TREE_VALUE (tail));
4215 if (reachable && !CLEANUP_EH_ONLY (tail))
4217 /* Cleanups may be run multiple times. For example,
4218 when exiting a binding contour, we expand the
4219 cleanups associated with that contour. When a goto
4220 within that binding contour has a target outside that
4221 contour, it will expand all cleanups from its scope to
4222 the target. Though the cleanups are expanded multiple
4223 times, the control paths are non-overlapping so the
4224 cleanups will not be executed twice. */
4226 /* We may need to protect from outer cleanups. */
4227 if (in_fixup && using_eh_for_cleanups_p)
4229 expand_eh_region_start ();
4231 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4233 expand_eh_region_end_fixup (TREE_VALUE (tail));
4235 else
4236 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4238 free_temp_slots ();
4243 /* Mark the context we are emitting RTL for as a conditional
4244 context, so that any cleanup actions we register with
4245 expand_decl_cleanup will be properly conditionalized when those
4246 cleanup actions are later performed. Must be called before any
4247 expression (tree) is expanded that is within a conditional context. */
4249 void
4250 start_cleanup_deferral (void)
4252 /* block_stack can be NULL if we are inside the parameter list. It is
4253 OK to do nothing, because cleanups aren't possible here. */
4254 if (block_stack)
4255 ++block_stack->data.block.conditional_code;
4258 /* Mark the end of a conditional region of code. Because cleanup
4259 deferrals may be nested, we may still be in a conditional region
4260 after we end the currently deferred cleanups; only after we end all
4261 deferred cleanups are we back in unconditional code. */
4263 void
4264 end_cleanup_deferral (void)
4266 /* block_stack can be NULL if we are inside the parameter list. It is
4267 OK to do nothing, because cleanups aren't possible here. */
4268 if (block_stack)
4269 --block_stack->data.block.conditional_code;
4272 tree
4273 last_cleanup_this_contour (void)
4275 if (block_stack == 0)
4276 return 0;
4278 return block_stack->data.block.cleanups;
4281 /* Return 1 if there are any pending cleanups at this point.
4282 Check the current contour as well as contours that enclose
4283 the current contour. */
4286 any_pending_cleanups (void)
4288 struct nesting *block;
4290 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4291 return 0;
4293 if (block_stack->data.block.cleanups != NULL)
4294 return 1;
4296 if (block_stack->data.block.outer_cleanups == 0)
4297 return 0;
4299 for (block = block_stack->next; block; block = block->next)
4300 if (block->data.block.cleanups != 0)
4301 return 1;
4303 return 0;
4306 /* Enter a case (Pascal) or switch (C) statement.
4307 Push a block onto case_stack and nesting_stack
4308 to accumulate the case-labels that are seen
4309 and to record the labels generated for the statement.
4311 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4312 Otherwise, this construct is transparent for `exit_something'.
4314 EXPR is the index-expression to be dispatched on.
4315 TYPE is its nominal type. We could simply convert EXPR to this type,
4316 but instead we take short cuts. */
4318 void
4319 expand_start_case (int exit_flag, tree expr, tree type,
4320 const char *printname)
4322 struct nesting *thiscase = ALLOC_NESTING ();
4324 /* Make an entry on case_stack for the case we are entering. */
4326 thiscase->desc = CASE_NESTING;
4327 thiscase->next = case_stack;
4328 thiscase->all = nesting_stack;
4329 thiscase->depth = ++nesting_depth;
4330 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4331 thiscase->data.case_stmt.case_list = 0;
4332 thiscase->data.case_stmt.index_expr = expr;
4333 thiscase->data.case_stmt.nominal_type = type;
4334 thiscase->data.case_stmt.default_label = 0;
4335 thiscase->data.case_stmt.printname = printname;
4336 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4337 case_stack = thiscase;
4338 nesting_stack = thiscase;
4340 do_pending_stack_adjust ();
4341 emit_queue ();
4343 /* Make sure case_stmt.start points to something that won't
4344 need any transformation before expand_end_case. */
4345 if (GET_CODE (get_last_insn ()) != NOTE)
4346 emit_note (NOTE_INSN_DELETED);
4348 thiscase->data.case_stmt.start = get_last_insn ();
4350 start_cleanup_deferral ();
4353 /* Start a "dummy case statement" within which case labels are invalid
4354 and are not connected to any larger real case statement.
4355 This can be used if you don't want to let a case statement jump
4356 into the middle of certain kinds of constructs. */
4358 void
4359 expand_start_case_dummy (void)
4361 struct nesting *thiscase = ALLOC_NESTING ();
4363 /* Make an entry on case_stack for the dummy. */
4365 thiscase->desc = CASE_NESTING;
4366 thiscase->next = case_stack;
4367 thiscase->all = nesting_stack;
4368 thiscase->depth = ++nesting_depth;
4369 thiscase->exit_label = 0;
4370 thiscase->data.case_stmt.case_list = 0;
4371 thiscase->data.case_stmt.start = 0;
4372 thiscase->data.case_stmt.nominal_type = 0;
4373 thiscase->data.case_stmt.default_label = 0;
4374 case_stack = thiscase;
4375 nesting_stack = thiscase;
4376 start_cleanup_deferral ();
4379 static void
4380 check_seenlabel (void)
4382 /* If this is the first label, warn if any insns have been emitted. */
4383 if (case_stack->data.case_stmt.line_number_status >= 0)
4385 rtx insn;
4387 restore_line_number_status
4388 (case_stack->data.case_stmt.line_number_status);
4389 case_stack->data.case_stmt.line_number_status = -1;
4391 for (insn = case_stack->data.case_stmt.start;
4392 insn;
4393 insn = NEXT_INSN (insn))
4395 if (GET_CODE (insn) == CODE_LABEL)
4396 break;
4397 if (GET_CODE (insn) != NOTE
4398 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4401 insn = PREV_INSN (insn);
4402 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4404 /* If insn is zero, then there must have been a syntax error. */
4405 if (insn)
4407 location_t locus;
4408 locus.file = NOTE_SOURCE_FILE (insn);
4409 locus.line = NOTE_LINE_NUMBER (insn);
4410 warning ("%Hunreachable code at beginning of %s", &locus,
4411 case_stack->data.case_stmt.printname);
4413 break;
4419 /* Accumulate one case or default label inside a case or switch statement.
4420 VALUE is the value of the case (a null pointer, for a default label).
4421 The function CONVERTER, when applied to arguments T and V,
4422 converts the value V to the type T.
4424 If not currently inside a case or switch statement, return 1 and do
4425 nothing. The caller will print a language-specific error message.
4426 If VALUE is a duplicate or overlaps, return 2 and do nothing
4427 except store the (first) duplicate node in *DUPLICATE.
4428 If VALUE is out of range, return 3 and do nothing.
4429 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4430 Return 0 on success.
4432 Extended to handle range statements. */
4435 pushcase (tree value, tree (*converter) (tree, tree), tree label,
4436 tree *duplicate)
4438 tree index_type;
4439 tree nominal_type;
4441 /* Fail if not inside a real case statement. */
4442 if (! (case_stack && case_stack->data.case_stmt.start))
4443 return 1;
4445 if (stack_block_stack
4446 && stack_block_stack->depth > case_stack->depth)
4447 return 5;
4449 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4450 nominal_type = case_stack->data.case_stmt.nominal_type;
4452 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4453 if (index_type == error_mark_node)
4454 return 0;
4456 /* Convert VALUE to the type in which the comparisons are nominally done. */
4457 if (value != 0)
4458 value = (*converter) (nominal_type, value);
4460 check_seenlabel ();
4462 /* Fail if this value is out of range for the actual type of the index
4463 (which may be narrower than NOMINAL_TYPE). */
4464 if (value != 0
4465 && (TREE_CONSTANT_OVERFLOW (value)
4466 || ! int_fits_type_p (value, index_type)))
4467 return 3;
4469 return add_case_node (value, value, label, duplicate);
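/* For illustration (hypothetical caller, not code from this file):

     tree dup;
     switch (pushcase (value, convert_fn, label, &dup))
       {
       case 1: error ("case label not within a switch statement"); break;
       case 2: error ("duplicate case value"); break;
       case 3: warning ("case value out of range"); break;
       case 5: error ("case label within scope of cleanup or variable array"); break;
       }

   `convert_fn' stands in for the front end's conversion routine; the
   diagnostic wording is the caller's business, not this file's.  */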
4472 /* Like pushcase but this case applies to all values between VALUE1 and
4473 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4474 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4475 starts at VALUE1 and ends at the highest value of the index type.
4476 If both are NULL, this case applies to all values.
4478 The return value is the same as that of pushcase but there is one
4479 additional error code: 4 means the specified range was empty. */
4482 pushcase_range (tree value1, tree value2, tree (*converter) (tree, tree),
4483 tree label, tree *duplicate)
4485 tree index_type;
4486 tree nominal_type;
4488 /* Fail if not inside a real case statement. */
4489 if (! (case_stack && case_stack->data.case_stmt.start))
4490 return 1;
4492 if (stack_block_stack
4493 && stack_block_stack->depth > case_stack->depth)
4494 return 5;
4496 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4497 nominal_type = case_stack->data.case_stmt.nominal_type;
4499 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4500 if (index_type == error_mark_node)
4501 return 0;
4503 check_seenlabel ();
4505 /* Convert VALUEs to the type in which the comparisons are nominally done
4506 and replace any unspecified value with the corresponding bound. */
4507 if (value1 == 0)
4508 value1 = TYPE_MIN_VALUE (index_type);
4509 if (value2 == 0)
4510 value2 = TYPE_MAX_VALUE (index_type);
4512 /* Fail if the range is empty. Do this before any conversion since
4513 we want to allow out-of-range empty ranges. */
4514 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4515 return 4;
4517 /* If the max was unbounded, use the max of the nominal_type we are
4518 converting to. Do this after the < check above to suppress false
4519 positives. */
4520 if (value2 == 0)
4521 value2 = TYPE_MAX_VALUE (nominal_type);
4523 value1 = (*converter) (nominal_type, value1);
4524 value2 = (*converter) (nominal_type, value2);
4526 /* Fail if these values are out of range. */
4527 if (TREE_CONSTANT_OVERFLOW (value1)
4528 || ! int_fits_type_p (value1, index_type))
4529 return 3;
4531 if (TREE_CONSTANT_OVERFLOW (value2)
4532 || ! int_fits_type_p (value2, index_type))
4533 return 3;
4535 return add_case_node (value1, value2, label, duplicate);
4538 /* Do the actual insertion of a case label for pushcase and pushcase_range
4539 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4540 slowdown for large switch statements. */
4543 add_case_node (tree low, tree high, tree label, tree *duplicate)
4545 struct case_node *p, **q, *r;
4547 /* If there's no HIGH value, then this is not a case range; it's
4548 just a simple case label. But that's just a degenerate case
4549 range. */
4550 if (!high)
4551 high = low;
4553 /* Handle default labels specially. */
4554 if (!high && !low)
4556 if (case_stack->data.case_stmt.default_label != 0)
4558 *duplicate = case_stack->data.case_stmt.default_label;
4559 return 2;
4561 case_stack->data.case_stmt.default_label = label;
4562 expand_label (label);
4563 return 0;
4566 q = &case_stack->data.case_stmt.case_list;
4567 p = *q;
4569 while ((r = *q))
4571 p = r;
4573 /* Keep going past elements distinctly greater than HIGH. */
4574 if (tree_int_cst_lt (high, p->low))
4575 q = &p->left;
4577 /* or distinctly less than LOW. */
4578 else if (tree_int_cst_lt (p->high, low))
4579 q = &p->right;
4581 else
4583 /* We have an overlap; this is an error. */
4584 *duplicate = p->code_label;
4585 return 2;
4589 /* Add this label to the chain, and succeed. */
4591 r = ggc_alloc (sizeof (struct case_node));
4592 r->low = low;
4594 /* If the bounds are equal, turn this into the one-value case. */
4595 if (tree_int_cst_equal (low, high))
4596 r->high = r->low;
4597 else
4598 r->high = high;
4600 r->code_label = label;
4601 expand_label (label);
4603 *q = r;
4604 r->parent = p;
4605 r->left = 0;
4606 r->right = 0;
4607 r->balance = 0;
4609 while (p)
4611 struct case_node *s;
4613 if (r == p->left)
4615 int b;
4617 if (! (b = p->balance))
4618 /* Growth propagation from left side. */
4619 p->balance = -1;
4620 else if (b < 0)
4622 if (r->balance < 0)
4624 /* R-Rotation */
4625 if ((p->left = s = r->right))
4626 s->parent = p;
4628 r->right = p;
4629 p->balance = 0;
4630 r->balance = 0;
4631 s = p->parent;
4632 p->parent = r;
4634 if ((r->parent = s))
4636 if (s->left == p)
4637 s->left = r;
4638 else
4639 s->right = r;
4641 else
4642 case_stack->data.case_stmt.case_list = r;
4644 else
4645 /* r->balance == +1 */
4647 /* LR-Rotation */
4649 int b2;
4650 struct case_node *t = r->right;
4652 if ((p->left = s = t->right))
4653 s->parent = p;
4655 t->right = p;
4656 if ((r->right = s = t->left))
4657 s->parent = r;
4659 t->left = r;
4660 b = t->balance;
4661 b2 = b < 0;
4662 p->balance = b2;
4663 b2 = -b2 - b;
4664 r->balance = b2;
4665 t->balance = 0;
4666 s = p->parent;
4667 p->parent = t;
4668 r->parent = t;
4670 if ((t->parent = s))
4672 if (s->left == p)
4673 s->left = t;
4674 else
4675 s->right = t;
4677 else
4678 case_stack->data.case_stmt.case_list = t;
4680 break;
4683 else
4685 /* p->balance == +1; growth of left side balances the node. */
4686 p->balance = 0;
4687 break;
4690 else
4691 /* r == p->right */
4693 int b;
4695 if (! (b = p->balance))
4696 /* Growth propagation from right side. */
4697 p->balance++;
4698 else if (b > 0)
4700 if (r->balance > 0)
4702 /* L-Rotation */
4704 if ((p->right = s = r->left))
4705 s->parent = p;
4707 r->left = p;
4708 p->balance = 0;
4709 r->balance = 0;
4710 s = p->parent;
4711 p->parent = r;
4712 if ((r->parent = s))
4714 if (s->left == p)
4715 s->left = r;
4716 else
4717 s->right = r;
4720 else
4721 case_stack->data.case_stmt.case_list = r;
4724 else
4725 /* r->balance == -1 */
4727 /* RL-Rotation */
4728 int b2;
4729 struct case_node *t = r->left;
4731 if ((p->right = s = t->left))
4732 s->parent = p;
4734 t->left = p;
4736 if ((r->left = s = t->right))
4737 s->parent = r;
4739 t->right = r;
4740 b = t->balance;
4741 b2 = b < 0;
4742 r->balance = b2;
4743 b2 = -b2 - b;
4744 p->balance = b2;
4745 t->balance = 0;
4746 s = p->parent;
4747 p->parent = t;
4748 r->parent = t;
4750 if ((t->parent = s))
4752 if (s->left == p)
4753 s->left = t;
4754 else
4755 s->right = t;
4758 else
4759 case_stack->data.case_stmt.case_list = t;
4761 break;
4763 else
4765 /* p->balance == -1; growth of right side balances the node. */
4766 p->balance = 0;
4767 break;
4771 r = p;
4772 p = p->parent;
4775 return 0;
4778 /* Returns the number of possible values of TYPE.
4779 Returns -1 if the number is unknown, variable, or if the number does not
4780 fit in a HOST_WIDE_INT.
4781 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4782 do not increase monotonically (there may be duplicates);
4783 to 1 if the values increase monotonically, but not always by 1;
4784 otherwise sets it to 0. */
4786 HOST_WIDE_INT
4787 all_cases_count (tree type, int *sparseness)
4789 tree t;
4790 HOST_WIDE_INT count, minval, lastval;
4792 *sparseness = 0;
4794 switch (TREE_CODE (type))
4796 case BOOLEAN_TYPE:
4797 count = 2;
4798 break;
4800 case CHAR_TYPE:
4801 count = 1 << BITS_PER_UNIT;
4802 break;
4804 default:
4805 case INTEGER_TYPE:
4806 if (TYPE_MAX_VALUE (type) != 0
4807 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4808 TYPE_MIN_VALUE (type))))
4809 && 0 != (t = fold (build (PLUS_EXPR, type, t,
4810 convert (type, integer_zero_node))))
4811 && host_integerp (t, 1))
4812 count = tree_low_cst (t, 1);
4813 else
4814 return -1;
4815 break;
4817 case ENUMERAL_TYPE:
4818 /* Don't waste time with enumeral types with huge values. */
4819 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4820 || TYPE_MAX_VALUE (type) == 0
4821 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4822 return -1;
4824 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
4825 count = 0;
4827 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4829 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4831 if (*sparseness == 2 || thisval <= lastval)
4832 *sparseness = 2;
4833 else if (thisval != minval + count)
4834 *sparseness = 1;
4836 lastval = thisval;
4837 count++;
4841 return count;
4844 #define BITARRAY_TEST(ARRAY, INDEX) \
4845 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4846 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4847 #define BITARRAY_SET(ARRAY, INDEX) \
4848 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4849 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
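/* For example:

     unsigned char seen[4] = { 0, 0, 0, 0 };   -- 4 * HOST_BITS_PER_CHAR bits
     BITARRAY_SET (seen, 5);                   -- mark value 5
     if (BITARRAY_TEST (seen, 5)) ...          -- now nonzero

   Each char of ARRAY holds HOST_BITS_PER_CHAR flag bits.  */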
4851 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT)
4852 according to the case values we have seen, assuming the case expression
4853 has the given TYPE.
4854 SPARSENESS is as determined by all_cases_count.
4856 The time needed is proportional to COUNT, unless
4857 SPARSENESS is 2, in which case quadratic time is needed. */
4859 void
4860 mark_seen_cases (tree type, unsigned char *cases_seen, HOST_WIDE_INT count,
4861 int sparseness)
4863 tree next_node_to_try = NULL_TREE;
4864 HOST_WIDE_INT next_node_offset = 0;
4866 struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4867 tree val = make_node (INTEGER_CST);
4869 TREE_TYPE (val) = type;
4870 if (! root)
4871 /* Do nothing. */
4873 else if (sparseness == 2)
4875 tree t;
4876 unsigned HOST_WIDE_INT xlo;
4878 /* This less efficient loop is only needed to handle
4879 duplicate case values (multiple enum constants
4880 with the same value). */
4881 TREE_TYPE (val) = TREE_TYPE (root->low);
4882 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4883 t = TREE_CHAIN (t), xlo++)
4885 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4886 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4887 n = root;
4890 /* Keep going past elements distinctly greater than VAL. */
4891 if (tree_int_cst_lt (val, n->low))
4892 n = n->left;
4894 /* or distinctly less than VAL. */
4895 else if (tree_int_cst_lt (n->high, val))
4896 n = n->right;
4898 else
4900 /* We have found a matching range. */
4901 BITARRAY_SET (cases_seen, xlo);
4902 break;
4905 while (n);
4908 else
4910 if (root->left)
4911 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4913 for (n = root; n; n = n->right)
4915 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4916 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4917 while (! tree_int_cst_lt (n->high, val))
4919 /* Calculate (into xlo) the "offset" of the integer (val).
4920 The element with lowest value has offset 0, the next smallest
4921 element has offset 1, etc. */
4923 unsigned HOST_WIDE_INT xlo;
4924 HOST_WIDE_INT xhi;
4925 tree t;
4927 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4929 /* The TYPE_VALUES will be in increasing order, so
4930 start searching where we last ended. */
4931 t = next_node_to_try;
4932 xlo = next_node_offset;
4933 xhi = 0;
4934 for (;;)
4936 if (t == NULL_TREE)
4938 t = TYPE_VALUES (type);
4939 xlo = 0;
4941 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4943 next_node_to_try = TREE_CHAIN (t);
4944 next_node_offset = xlo + 1;
4945 break;
4947 xlo++;
4948 t = TREE_CHAIN (t);
4949 if (t == next_node_to_try)
4951 xlo = -1;
4952 break;
4956 else
4958 t = TYPE_MIN_VALUE (type);
4959 if (t)
4960 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4961 &xlo, &xhi);
4962 else
4963 xlo = xhi = 0;
4964 add_double (xlo, xhi,
4965 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4966 &xlo, &xhi);
4969 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
4970 BITARRAY_SET (cases_seen, xlo);
4972 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4973 1, 0,
4974 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
4980 /* Given a switch statement with an expression that is an enumeration
4981 type, warn if any of the enumeration type's literals are not
4982 covered by the case expressions of the switch. Also, warn if there
4983 are any extra switch cases that are *not* elements of the
4984 enumerated type.
4986 Historical note:
4988 At one stage this function would: ``If all enumeration literals
4989 were covered by the case expressions, turn one of the expressions
4990 into the default expression since it should not be possible to fall
4991 through such a switch.''
4993 That code has since been removed as: ``This optimization is
4994 disabled because it causes valid programs to fail. ANSI C does not
4995 guarantee that an expression with enum type will have a value that
4996 is the same as one of the enumeration literals.'' */
4998 void
4999 check_for_full_enumeration_handling (tree type)
5001 struct case_node *n;
5002 tree chain;
5004 /* True iff the selector type is a numbered set mode. */
5005 int sparseness = 0;
5007 /* The number of possible selector values. */
5008 HOST_WIDE_INT size;
5010 /* For each possible selector value, a one iff it has been matched
5011 by a case value alternative. */
5012 unsigned char *cases_seen;
5014 /* The allocated size of cases_seen, in chars. */
5015 HOST_WIDE_INT bytes_needed;
5017 size = all_cases_count (type, &sparseness);
5018 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5020 if (size > 0 && size < 600000
5021 /* We deliberately use calloc here, not xcalloc, so that we can suppress
5022 this optimization if we don't have enough memory rather than
5023 aborting, as xmalloc would do. */
5024 && (cases_seen = really_call_calloc (bytes_needed, 1)) != NULL)
5026 HOST_WIDE_INT i;
5027 tree v = TYPE_VALUES (type);
5029 /* The time complexity of this code is normally O(N), where
5030 N is the number of members in the enumerated type.
5031 However, if type is an ENUMERAL_TYPE whose values do not
5032 increase monotonically, O(N*log(N)) time may be needed. */
5034 mark_seen_cases (type, cases_seen, size, sparseness);
5036 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5037 if (BITARRAY_TEST (cases_seen, i) == 0)
5038 warning ("enumeration value `%s' not handled in switch",
5039 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5041 free (cases_seen);
5044 /* Now we go the other way around; we warn if there are case
5045 expressions that don't correspond to enumerators. This can
5046 occur since C and C++ don't enforce type-checking of
5047 assignments to enumeration variables. */
5049 if (case_stack->data.case_stmt.case_list
5050 && case_stack->data.case_stmt.case_list->left)
5051 case_stack->data.case_stmt.case_list
5052 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5053 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5055 for (chain = TYPE_VALUES (type);
5056 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5057 chain = TREE_CHAIN (chain))
5060 if (!chain)
5062 if (TYPE_NAME (type) == 0)
5063 warning ("case value `%ld' not in enumerated type",
5064 (long) TREE_INT_CST_LOW (n->low));
5065 else
5066 warning ("case value `%ld' not in enumerated type `%s'",
5067 (long) TREE_INT_CST_LOW (n->low),
5068 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5069 == IDENTIFIER_NODE)
5070 ? TYPE_NAME (type)
5071 : DECL_NAME (TYPE_NAME (type))));
5073 if (!tree_int_cst_equal (n->low, n->high))
5075 for (chain = TYPE_VALUES (type);
5076 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5077 chain = TREE_CHAIN (chain))
5080 if (!chain)
5082 if (TYPE_NAME (type) == 0)
5083 warning ("case value `%ld' not in enumerated type",
5084 (long) TREE_INT_CST_LOW (n->high));
5085 else
5086 warning ("case value `%ld' not in enumerated type `%s'",
5087 (long) TREE_INT_CST_LOW (n->high),
5088 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5089 == IDENTIFIER_NODE)
5090 ? TYPE_NAME (type)
5091 : DECL_NAME (TYPE_NAME (type))));
5098 /* Maximum number of case bit tests. */
5099 #define MAX_CASE_BIT_TESTS 3
5101 /* By default, enable case bit tests on targets with ashlsi3. */
5102 #ifndef CASE_USE_BIT_TESTS
5103 #define CASE_USE_BIT_TESTS (ashl_optab->handlers[word_mode].insn_code \
5104 != CODE_FOR_nothing)
5105 #endif
5108 /* A case_bit_test represents a set of case nodes that may be
5109 selected from using a bit-wise comparison. HI and LO hold
5110 the integer to be tested against, LABEL contains the label
5111 to jump to upon success and BITS counts the number of case
5112 nodes handled by this test, typically the number of bits
5113 set in HI:LO. */
5115 struct case_bit_test
5117 HOST_WIDE_INT hi;
5118 HOST_WIDE_INT lo;
5119 rtx label;
5120 int bits;
5123 /* Determine whether "1 << x" is relatively cheap in word_mode. */
5125 static bool
5126 lshift_cheap_p (void)
5128 static bool init = false;
5129 static bool cheap = true;
5131 if (!init)
5133 rtx reg = gen_rtx_REG (word_mode, 10000);
5134 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
5135 cheap = cost < COSTS_N_INSNS (3);
5136 init = true;
5139 return cheap;
5142 /* Comparison function for qsort to order bit tests by decreasing
5143 number of case nodes, i.e. the node with the most cases gets
5144 tested first. */
5146 static int
5147 case_bit_test_cmp (const void *p1, const void *p2)
5149 const struct case_bit_test *d1 = p1;
5150 const struct case_bit_test *d2 = p2;
5152 return d2->bits - d1->bits;
5155 /* Expand a switch statement by a short sequence of bit-wise
5156 comparisons. "switch(x)" is effectively converted into
5157 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
5158 integer constants.
5160 INDEX_EXPR is the value being switched on, which is of
5161 type INDEX_TYPE. MINVAL is the lowest case value in
5162 the case nodes, of INDEX_TYPE type, and RANGE is the highest
5163 value minus MINVAL, also of type INDEX_TYPE. NODES is
5164 the set of case nodes, and DEFAULT_LABEL is the label to
5165 branch to should none of the cases match.
5167 There *MUST* be MAX_CASE_BIT_TESTS or fewer unique case
5168 node targets. */
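/* For example (an illustrative source-level view, assuming all four
   cases share one target):

     switch (x)
       {
       case 1: case 2: case 4: case 6: foo (); break;
       default: bar ();
       }

   is expanded roughly as

     if ((unsigned) x <= 6 && ((1U << x) & 0x56))
       foo ();
     else
       bar ();

   where 0x56 has bits 1, 2, 4 and 6 set.  Here MINVAL has been
   optimized to zero, so no subtraction is needed.  */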
5170 static void
5171 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
5172 tree range, case_node_ptr nodes, rtx default_label)
5174 struct case_bit_test test[MAX_CASE_BIT_TESTS];
5175 enum machine_mode mode;
5176 rtx expr, index, label;
5177 unsigned int i, j, lo, hi;
5178 struct case_node *n;
5179 unsigned int count;
5181 count = 0;
5182 for (n = nodes; n; n = n->right)
5184 label = label_rtx (n->code_label);
5185 for (i = 0; i < count; i++)
5186 if (same_case_target_p (label, test[i].label))
5187 break;
5189 if (i == count)
5191 if (count >= MAX_CASE_BIT_TESTS)
5192 abort ();
5193 test[i].hi = 0;
5194 test[i].lo = 0;
5195 test[i].label = label;
5196 test[i].bits = 1;
5197 count++;
5199 else
5200 test[i].bits++;
5202 lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5203 n->low, minval)), 1);
5204 hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5205 n->high, minval)), 1);
5206 for (j = lo; j <= hi; j++)
5207 if (j >= HOST_BITS_PER_WIDE_INT)
5208 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_WIDE_INT);
5209 else
5210 test[i].lo |= (HOST_WIDE_INT) 1 << j;
5213 qsort (test, count, sizeof(*test), case_bit_test_cmp);
5215 index_expr = fold (build (MINUS_EXPR, index_type,
5216 convert (index_type, index_expr),
5217 convert (index_type, minval)));
5218 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5219 emit_queue ();
5220 index = protect_from_queue (index, 0);
5221 do_pending_stack_adjust ();
5223 mode = TYPE_MODE (index_type);
5224 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
5225 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
5226 default_label);
5228 index = convert_to_mode (word_mode, index, 0);
5229 index = expand_binop (word_mode, ashl_optab, const1_rtx,
5230 index, NULL_RTX, 1, OPTAB_WIDEN);
5232 for (i = 0; i < count; i++)
5234 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
5235 expr = expand_binop (word_mode, and_optab, index, expr,
5236 NULL_RTX, 1, OPTAB_WIDEN);
5237 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
5238 word_mode, 1, test[i].label);
5241 emit_jump (default_label);
5244 /* Terminate a case (Pascal) or switch (C) statement
5245 in which ORIG_INDEX is the expression to be tested.
5246 If ORIG_TYPE is not NULL, it is the original type of ORIG_INDEX
5247 as given in the source before any compiler conversions.
5248 Generate the code to test it and jump to the right place. */
5250 void
5251 expand_end_case_type (tree orig_index, tree orig_type)
5253 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
5254 rtx default_label = 0;
5255 struct case_node *n, *m;
5256 unsigned int count, uniq;
5257 rtx index;
5258 rtx table_label;
5259 int ncases;
5260 rtx *labelvec;
5261 int i;
5262 rtx before_case, end, lab;
5263 struct nesting *thiscase = case_stack;
5264 tree index_expr, index_type;
5265 bool exit_done = false;
5266 int unsignedp;
5268 /* Don't crash due to previous errors. */
5269 if (thiscase == NULL)
5270 return;
5272 index_expr = thiscase->data.case_stmt.index_expr;
5273 index_type = TREE_TYPE (index_expr);
5274 unsignedp = TREE_UNSIGNED (index_type);
5275 if (orig_type == NULL)
5276 orig_type = TREE_TYPE (orig_index);
5278 do_pending_stack_adjust ();
5280 /* This might get a spurious warning in the presence of a syntax error;
5281 it could be fixed by moving the call to check_seenlabel after the
5282 check for error_mark_node, and copying the code of check_seenlabel that
5283 deals with case_stack->data.case_stmt.line_number_status /
5284 restore_line_number_status in front of the call to end_cleanup_deferral;
5285 however, this might miss some useful warnings in the presence of
5286 non-syntax errors. */
5287 check_seenlabel ();
5289 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5290 if (index_type != error_mark_node)
5292 /* If the switch expression was an enumerated type, check that
5293 all enumeration literals are covered by the cases.
5294 The check is made when -Wswitch was specified and there is no
5295 default case, or when -Wswitch-enum was specified. */
5296 if (((warn_switch && !thiscase->data.case_stmt.default_label)
5297 || warn_switch_enum)
5298 && TREE_CODE (orig_type) == ENUMERAL_TYPE
5299 && TREE_CODE (index_expr) != INTEGER_CST)
5300 check_for_full_enumeration_handling (orig_type);
5302 if (warn_switch_default && !thiscase->data.case_stmt.default_label)
5303 warning ("switch missing default case");
5305 /* If we don't have a default-label, create one here,
5306 after the body of the switch. */
5307 if (thiscase->data.case_stmt.default_label == 0)
5309 thiscase->data.case_stmt.default_label
5310 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5311 /* Share the exit label if possible. */
5312 if (thiscase->exit_label)
5314 SET_DECL_RTL (thiscase->data.case_stmt.default_label,
5315 thiscase->exit_label);
5316 exit_done = true;
5318 expand_label (thiscase->data.case_stmt.default_label);
5320 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5322 before_case = get_last_insn ();
5324 if (thiscase->data.case_stmt.case_list
5325 && thiscase->data.case_stmt.case_list->left)
5326 thiscase->data.case_stmt.case_list
5327 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5329 /* Simplify the case-list before we count it. */
5330 group_case_nodes (thiscase->data.case_stmt.case_list);
5331 strip_default_case_nodes (&thiscase->data.case_stmt.case_list,
5332 default_label);
5334 /* Get upper and lower bounds of case values.
5335 Also convert all the case values to the index expr's data type. */
5337 uniq = 0;
5338 count = 0;
5339 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5341 /* Check that low and high label values are integers. */
5342 if (TREE_CODE (n->low) != INTEGER_CST)
5343 abort ();
5344 if (TREE_CODE (n->high) != INTEGER_CST)
5345 abort ();
5347 n->low = convert (index_type, n->low);
5348 n->high = convert (index_type, n->high);
5350 /* Count the elements and track the largest and smallest
5351 of them (treating them as signed even if they are not). */
5352 if (count++ == 0)
5354 minval = n->low;
5355 maxval = n->high;
5357 else
5359 if (INT_CST_LT (n->low, minval))
5360 minval = n->low;
5361 if (INT_CST_LT (maxval, n->high))
5362 maxval = n->high;
5364 /* A range counts double, since it requires two compares. */
5365 if (! tree_int_cst_equal (n->low, n->high))
5366 count++;
5368 /* Count the number of unique case node targets. */
5369 uniq++;
5370 lab = label_rtx (n->code_label);
5371 for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
5372 if (same_case_target_p (label_rtx (m->code_label), lab))
5374 uniq--;
5375 break;
5379 /* Compute span of values. */
5380 if (count != 0)
5381 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5383 end_cleanup_deferral ();
5385 if (count == 0)
5387 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5388 emit_queue ();
5389 emit_jump (default_label);
5392 /* Try implementing this switch statement by a short sequence of
5393 bit-wise comparisons. However, we let the binary-tree case
5394 below handle constant index expressions. */
5395 else if (CASE_USE_BIT_TESTS
5396 && ! TREE_CONSTANT (index_expr)
5397 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
5398 && compare_tree_int (range, 0) > 0
5399 && lshift_cheap_p ()
5400 && ((uniq == 1 && count >= 3)
5401 || (uniq == 2 && count >= 5)
5402 || (uniq == 3 && count >= 6)))
5404 /* If all the case values fit in a word without having to
5405 subtract MINVAL, we can omit the subtraction entirely. */
5407 if (compare_tree_int (minval, 0) > 0
5408 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
5410 minval = integer_zero_node;
5411 range = maxval;
5413 emit_case_bit_tests (index_type, index_expr, minval, range,
5414 thiscase->data.case_stmt.case_list,
5415 default_label);
5418 /* If the range of values is much bigger than the number of values,
5419 make a sequence of conditional branches instead of a dispatch table.
5420 If the switch-index is a constant, also do it this way
5421 because we can optimize it. */
5423 else if (count < case_values_threshold ()
5424 || compare_tree_int (range,
5425 (optimize_size ? 3 : 10) * count) > 0
5426 /* RANGE may be signed, and really large ranges will show up
5427 as negative numbers. */
5428 || compare_tree_int (range, 0) < 0
5429 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5430 || flag_pic
5431 #endif
5432 || TREE_CONSTANT (index_expr))
5434 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5436 /* If the index is a short or char for which we do not have
5437 an insn to handle comparisons directly, convert it to
5438 a full integer now, rather than letting each comparison
5439 generate the conversion. */
5441 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5442 && ! have_insn_for (COMPARE, GET_MODE (index)))
5444 enum machine_mode wider_mode;
5445 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5446 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5447 if (have_insn_for (COMPARE, wider_mode))
5449 index = convert_to_mode (wider_mode, index, unsignedp);
5450 break;
5454 emit_queue ();
5455 do_pending_stack_adjust ();
5457 index = protect_from_queue (index, 0);
5458 if (GET_CODE (index) == MEM)
5459 index = copy_to_reg (index);
5460 if (GET_CODE (index) == CONST_INT
5461 || TREE_CODE (index_expr) == INTEGER_CST)
5463 /* Make a tree node with the proper constant value
5464 if we don't already have one. */
5465 if (TREE_CODE (index_expr) != INTEGER_CST)
5467 index_expr
5468 = build_int_2 (INTVAL (index),
5469 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5470 index_expr = convert (index_type, index_expr);
5473 /* For constant index expressions we need only
5474 issue an unconditional branch to the appropriate
5475 target code. The job of removing any unreachable
5476 code is left to the optimization phase if the
5477 "-O" option is specified. */
5478 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5479 if (! tree_int_cst_lt (index_expr, n->low)
5480 && ! tree_int_cst_lt (n->high, index_expr))
5481 break;
5483 if (n)
5484 emit_jump (label_rtx (n->code_label));
5485 else
5486 emit_jump (default_label);
5488 else
5490 /* If the index expression is not constant we generate
5491 a binary decision tree to select the appropriate
5492 target code. This is done as follows:
5494 The list of cases is rearranged into a binary tree,
5495 nearly optimal assuming equal probability for each case.
5497 The tree is transformed into RTL, eliminating
5498 redundant test conditions at the same time.
5500 If program flow could reach the end of the
5501 decision tree an unconditional jump to the
5502 default code is emitted. */
5504 use_cost_table
5505 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
5506 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5507 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
5508 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5509 default_label, index_type);
5510 emit_jump_if_reachable (default_label);
5513 else
5515 table_label = gen_label_rtx ();
5516 if (! try_casesi (index_type, index_expr, minval, range,
5517 table_label, default_label))
5519 index_type = thiscase->data.case_stmt.nominal_type;
5521 /* Index jumptables from zero for suitable values of
5522 minval to avoid a subtraction. */
5523 if (! optimize_size
5524 && compare_tree_int (minval, 0) > 0
5525 && compare_tree_int (minval, 3) < 0)
5527 minval = integer_zero_node;
5528 range = maxval;
5531 if (! try_tablejump (index_type, index_expr, minval, range,
5532 table_label, default_label))
5533 abort ();
5536 /* Get table of labels to jump to, in order of case index. */
5538 ncases = tree_low_cst (range, 0) + 1;
5539 labelvec = alloca (ncases * sizeof (rtx));
5540 memset (labelvec, 0, ncases * sizeof (rtx));
5542 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5544 /* Compute the low and high bounds relative to the minimum
5545 value since that should fit in a HOST_WIDE_INT while the
5546 actual values may not. */
5547 HOST_WIDE_INT i_low
5548 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5549 n->low, minval)), 1);
5550 HOST_WIDE_INT i_high
5551 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5552 n->high, minval)), 1);
5553 HOST_WIDE_INT i;
5555 for (i = i_low; i <= i_high; i ++)
5556 labelvec[i]
5557 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5560 /* Fill in the gaps with the default. */
5561 for (i = 0; i < ncases; i++)
5562 if (labelvec[i] == 0)
5563 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5565 /* Output the table. */
5566 emit_label (table_label);
5568 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5569 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5570 gen_rtx_LABEL_REF (Pmode, table_label),
5571 gen_rtvec_v (ncases, labelvec),
5572 const0_rtx, const0_rtx));
5573 else
5574 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5575 gen_rtvec_v (ncases, labelvec)));
5577 /* If the case insn drops through the table,
5578 after the table we must jump to the default-label.
5579 Otherwise record no drop-through after the table. */
5580 #ifdef CASE_DROPS_THROUGH
5581 emit_jump (default_label);
5582 #else
5583 emit_barrier ();
5584 #endif
5587 before_case = NEXT_INSN (before_case);
5588 end = get_last_insn ();
5589 if (squeeze_notes (&before_case, &end))
5590 abort ();
5591 reorder_insns (before_case, end,
5592 thiscase->data.case_stmt.start);
5594 else
5595 end_cleanup_deferral ();
5597 if (thiscase->exit_label && !exit_done)
5598 emit_label (thiscase->exit_label);
5600 POPSTACK (case_stack);
5602 free_temp_slots ();
5605 /* Convert the tree NODE into a list linked by the right field, with the left
5606 field zeroed. RIGHT is used for recursion; it is a list to be placed
5607 rightmost in the resulting list. */
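/* For example, the tree

           4
          / \
         2   6

   becomes the list 2 -> 4 -> 6, linked through the RIGHT fields with
   every LEFT field zeroed, via case_tree2list (root, 0).  */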
5609 static struct case_node *
5610 case_tree2list (struct case_node *node, struct case_node *right)
5612 struct case_node *left;
5614 if (node->right)
5615 right = case_tree2list (node->right, right);
5617 node->right = right;
5618 if ((left = node->left))
5620 node->left = 0;
5621 return case_tree2list (left, node);
5624 return node;
5627 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5629 static void
5630 do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
5632 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
5634 if (op1 == op2)
5635 emit_jump (label);
5637 else
5638 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
5639 (GET_MODE (op1) == VOIDmode
5640 ? GET_MODE (op2) : GET_MODE (op1)),
5641 unsignedp, label);
5644 /* Not all case values are encountered equally often. This function
5645 uses a heuristic to weight case labels, in cases where that
5646 looks like a reasonable thing to do.
5648 Right now, all we try to guess is text, and we establish the
5649 following weights:
5651 chars above space: 16
5652 digits: 16
5653 default: 12
5654 space, punct: 8
5655 tab: 4
5656 newline: 2
5657 other "\" chars: 1
5658 remaining chars: 0
5660 If we find any cases in the switch that are not either -1 or in the range
5661 of valid ASCII characters, or are control characters other than those
5662 commonly used with "\", don't treat this switch as scanning text.
5664 Return 1 if these nodes are suitable for cost estimation, otherwise
5665 return 0. */
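/* Illustration of the weighting above: in

       switch (c) { case 'a': ... case '0': ... case ' ': ... case '\n': ... }

   the labels get weights 16, 16, 8 and 2, so balance_case_nodes below
   places the common text characters nearer the root of the decision
   tree.  A case value such as 200 or '\001' makes this function
   return 0 and the equal-probability balancing is used instead.  */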
5667 static int
5668 estimate_case_costs (case_node_ptr node)
5670 tree min_ascii = integer_minus_one_node;
5671 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5672 case_node_ptr n;
5673 int i;
5675 /* If we haven't already made the cost table, make it now. Note that the
5676 lower bound of the table is -1, not zero. */
5678 if (! cost_table_initialized)
5680 cost_table_initialized = 1;
5682 for (i = 0; i < 128; i++)
5684 if (ISALNUM (i))
5685 COST_TABLE (i) = 16;
5686 else if (ISPUNCT (i))
5687 COST_TABLE (i) = 8;
5688 else if (ISCNTRL (i))
5689 COST_TABLE (i) = -1;
5692 COST_TABLE (' ') = 8;
5693 COST_TABLE ('\t') = 4;
5694 COST_TABLE ('\0') = 4;
5695 COST_TABLE ('\n') = 2;
5696 COST_TABLE ('\f') = 1;
5697 COST_TABLE ('\v') = 1;
5698 COST_TABLE ('\b') = 1;
5701 /* See if all the case expressions look like text. It is text if the
5702 lowest constant is >= -1 and the highest constant is <= 127. Do all comparisons
5703 as signed arithmetic since we don't want to ever access cost_table with a
5704 value less than -1. Also check that none of the constants in a range
5705 are strange control characters. */
5707 for (n = node; n; n = n->right)
5709 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5710 return 0;
5712 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5713 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5714 if (COST_TABLE (i) < 0)
5715 return 0;
5718 /* All interesting values are within the range of interesting
5719 ASCII characters. */
5720 return 1;
5723 /* Determine whether two case labels branch to the same target. */
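/* (Sketch: in `case 1: goto out; ... case 2: goto out;' the two case
   labels are distinct, but next_real_insn of each is a simplejump to
   the same label, so they are recognized below as one target.)  */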
5725 static bool
5726 same_case_target_p (rtx l1, rtx l2)
5728 rtx i1, i2;
5730 if (l1 == l2)
5731 return true;
5733 i1 = next_real_insn (l1);
5734 i2 = next_real_insn (l2);
5735 if (i1 == i2)
5736 return true;
5738 if (i1 && simplejump_p (i1))
5740 l1 = XEXP (SET_SRC (PATTERN (i1)), 0);
5743 if (i2 && simplejump_p (i2))
5745 l2 = XEXP (SET_SRC (PATTERN (i2)), 0);
5747 return l1 == l2;
5750 /* Delete nodes that branch to the default label from a list of
5751 case nodes. E.g. case 5: default: becomes just default: */
5753 static void
5754 strip_default_case_nodes (case_node_ptr *prev, rtx deflab)
5756 case_node_ptr ptr;
5758 while (*prev)
5760 ptr = *prev;
5761 if (same_case_target_p (label_rtx (ptr->code_label), deflab))
5762 *prev = ptr->right;
5763 else
5764 prev = &ptr->right;
5768 /* Scan an ordered list of case nodes
5769 combining those with consecutive values or ranges.
5771 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
5773 static void
5774 group_case_nodes (case_node_ptr head)
5776 case_node_ptr node = head;
5778 while (node)
5780 rtx lab = label_rtx (node->code_label);
5781 case_node_ptr np = node;
5783 /* Try to group the successors of NODE with NODE. */
5784 while (((np = np->right) != 0)
5785 /* Do they jump to the same place? */
5786 && same_case_target_p (label_rtx (np->code_label), lab)
5787 /* Are their ranges consecutive? */
5788 && tree_int_cst_equal (np->low,
5789 fold (build (PLUS_EXPR,
5790 TREE_TYPE (node->high),
5791 node->high,
5792 integer_one_node)))
5793 /* An overflow is not consecutive. */
5794 && tree_int_cst_lt (node->high,
5795 fold (build (PLUS_EXPR,
5796 TREE_TYPE (node->high),
5797 node->high,
5798 integer_one_node))))
5800 node->high = np->high;
5802 /* NP is the first node after NODE which can't be grouped with it.
5803 Delete the nodes in between, and move on to that node. */
5804 node->right = np;
5805 node = np;
5809 /* Take an ordered list of case nodes
5810 and transform them into a near-optimal binary tree,
5811 on the assumption that any target code selection value is as
5812 likely as any other.
5814 The transformation is performed by splitting the ordered
5815 list into two equal sections plus a pivot. The parts are
5816 then attached to the pivot as left and right branches. Each
5817 branch is then transformed recursively. */
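/* For example, assuming equal weights for every value, the ordered
   list 1,2,3,4,5,6,7 splits at the pivot 4 into 1,2,3 and 5,6,7, and
   the recursion yields

               4
              / \
             2   6
            / \ / \
           1  3 5  7  */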
5819 static void
5820 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
5822 case_node_ptr np;
5824 np = *head;
5825 if (np)
5827 int cost = 0;
5828 int i = 0;
5829 int ranges = 0;
5830 case_node_ptr *npp;
5831 case_node_ptr left;
5833 /* Count the number of entries on branch. Also count the ranges. */
5835 while (np)
5837 if (!tree_int_cst_equal (np->low, np->high))
5839 ranges++;
5840 if (use_cost_table)
5841 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
5844 if (use_cost_table)
5845 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
5847 i++;
5848 np = np->right;
5851 if (i > 2)
5853 /* Split this list if it is long enough for that to help. */
5854 npp = head;
5855 left = *npp;
5856 if (use_cost_table)
5858 /* Find the place in the list that bisects the list's total cost.
5859 Here I gets half the total cost. */
5860 int n_moved = 0;
5861 i = (cost + 1) / 2;
5862 while (1)
5864 /* Skip nodes while their cost does not reach that amount. */
5865 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5866 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5867 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
5868 if (i <= 0)
5869 break;
5870 npp = &(*npp)->right;
5871 n_moved += 1;
5873 if (n_moved == 0)
5875 /* Leave this branch lopsided, but optimize left-hand
5876 side and fill in `parent' fields for right-hand side. */
5877 np = *head;
5878 np->parent = parent;
5879 balance_case_nodes (&np->left, np);
5880 for (; np->right; np = np->right)
5881 np->right->parent = np;
5882 return;
5885 /* If there are just three nodes, split at the middle one. */
5886 else if (i == 3)
5887 npp = &(*npp)->right;
5888 else
5890 /* Find the place in the list that bisects the list's total cost,
5891 where ranges count as 2.
5892 Here I gets half the total cost. */
5893 i = (i + ranges + 1) / 2;
5894 while (1)
5896 /* Skip nodes while their cost does not reach that amount. */
5897 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5898 i--;
5899 i--;
5900 if (i <= 0)
5901 break;
5902 npp = &(*npp)->right;
5905 *head = np = *npp;
5906 *npp = 0;
5907 np->parent = parent;
5908 np->left = left;
5910 /* Optimize each of the two split parts. */
5911 balance_case_nodes (&np->left, np);
5912 balance_case_nodes (&np->right, np);
5914 else
5916 /* Else leave this branch as one level,
5917 but fill in `parent' fields. */
5918 np = *head;
5919 np->parent = parent;
5920 for (; np->right; np = np->right)
5921 np->right->parent = np;
5926 /* Search the parent sections of the case node tree
5927 to see if a test for the lower bound of NODE would be redundant.
5928 INDEX_TYPE is the type of the index expression.
5930 The instructions to generate the case decision tree are
5931 output in the same order as nodes are processed, so it is
5932 known that if a parent node has checked against the current
5933 node's low value minus one, the current node is bounded at
5934 its lower end. Thus the test would be redundant. */
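/* Sketch: if this node handles `case 50 ... 59' and some parent
   already tested `case 40 ... 49' (PNODE->high == 49, our low minus
   one), control can only reach this node with INDEX >= 50, so the
   lower-bound compare is omitted.  A low equal to TYPE_MIN_VALUE
   likewise needs no test.  */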
5936 static int
5937 node_has_low_bound (case_node_ptr node, tree index_type)
5939 tree low_minus_one;
5940 case_node_ptr pnode;
5942 /* If the lower bound of this node is the lowest value in the index type,
5943 we need not test it. */
5945 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5946 return 1;
5948 /* If this node has a left branch, the value at the left must be less
5949 than that at this node, so it cannot be bounded at the bottom and
5950 we need not bother testing any further. */
5952 if (node->left)
5953 return 0;
5955 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5956 node->low, integer_one_node));
5958 /* If the subtraction above overflowed, we can't verify anything.
5959 Otherwise, look for a parent that tests our value - 1. */
5961 if (! tree_int_cst_lt (low_minus_one, node->low))
5962 return 0;
5964 for (pnode = node->parent; pnode; pnode = pnode->parent)
5965 if (tree_int_cst_equal (low_minus_one, pnode->high))
5966 return 1;
5968 return 0;
5971 /* Search the parent sections of the case node tree
5972 to see if a test for the upper bound of NODE would be redundant.
5973 INDEX_TYPE is the type of the index expression.
5975 The instructions to generate the case decision tree are
5976 output in the same order as nodes are processed, so it is
5977 known that if a parent node has checked against the current
5978 node's high value plus one, the current node is bounded at
5979 its upper end. Thus the test would be redundant. */
5981 static int
5982 node_has_high_bound (case_node_ptr node, tree index_type)
5984 tree high_plus_one;
5985 case_node_ptr pnode;
5987 /* If there is no upper bound, obviously no test is needed. */
5989 if (TYPE_MAX_VALUE (index_type) == NULL)
5990 return 1;
5992 /* If the upper bound of this node is the highest value in the type
5993 of the index expression, we need not test against it. */
5995 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5996 return 1;
5998 /* If this node has a right branch, the value at the right must be greater
5999 than that at this node, so it cannot be bounded at the top and
6000 we need not bother testing any further. */
6002 if (node->right)
6003 return 0;
6005 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
6006 node->high, integer_one_node));
6008 /* If the addition above overflowed, we can't verify anything.
6009 Otherwise, look for a parent that tests our value + 1. */
6011 if (! tree_int_cst_lt (node->high, high_plus_one))
6012 return 0;
6014 for (pnode = node->parent; pnode; pnode = pnode->parent)
6015 if (tree_int_cst_equal (high_plus_one, pnode->low))
6016 return 1;
6018 return 0;
6021 /* Search the parent sections of the
6022 case node tree to see if both tests for the upper and lower
6023 bounds of NODE would be redundant. */
6025 static int
6026 node_is_bounded (case_node_ptr node, tree index_type)
6028 return (node_has_low_bound (node, index_type)
6029 && node_has_high_bound (node, index_type));
6032 /* Emit an unconditional jump to LABEL unless it would be dead code. */
6034 static void
6035 emit_jump_if_reachable (rtx label)
6037 if (GET_CODE (get_last_insn ()) != BARRIER)
6038 emit_jump (label);
6041 /* Emit step-by-step code to select a case for the value of INDEX.
6042 The decision tree generated here follows the form of the
6043 case-node binary tree NODE, whose nodes represent test conditions.
6044 INDEX_TYPE is the type of the index of the switch.
6046 Care is taken to prune redundant tests from the decision tree
6047 by detecting any boundary conditions already checked by
6048 emitted rtx. (See node_has_high_bound, node_has_low_bound
6049 and node_is_bounded, above.)
6051 Where the test conditions can be shown to be redundant we emit
6052 an unconditional jump to the target code. As a further
6053 optimization, the subordinates of a tree node are examined to
6054 check for bounded nodes. In this case conditional and/or
6055 unconditional jumps as a result of the boundary check for the
6056 current node are arranged to target the subordinates' associated
6057 code for out-of-bound conditions on the current node.
6059 We can assume that when control reaches the code generated here,
6060 the index value has already been compared with the parents
6061 of this node, and determined to be on the same side of each parent
6062 as this node is. Thus, if this node tests for the value 51,
6063 and a parent tested for 52, we don't need to consider
6064 the possibility of a value greater than 51. If another parent
6065 tests for the value 50, then this node need not test anything. */
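/* Continuing the example above: with parents having already
   established that the value lies strictly between 50 and 52, a node
   for `case 51' is fully bounded, node_is_bounded returns nonzero,
   and the expansion degenerates to one unconditional jump.  */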
6067 static void
6068 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
6069 tree index_type)
6071 /* If INDEX has an unsigned type, we must make unsigned branches. */
6072 int unsignedp = TREE_UNSIGNED (index_type);
6073 enum machine_mode mode = GET_MODE (index);
6074 enum machine_mode imode = TYPE_MODE (index_type);
6076 /* See if our parents have already tested everything for us.
6077 If they have, emit an unconditional jump for this node. */
6078 if (node_is_bounded (node, index_type))
6079 emit_jump (label_rtx (node->code_label));
6081 else if (tree_int_cst_equal (node->low, node->high))
6083 /* Node is single-valued. First see if the index expression matches
6084 this node and then check our children, if any. */
6086 do_jump_if_equal (index,
6087 convert_modes (mode, imode,
6088 expand_expr (node->low, NULL_RTX,
6089 VOIDmode, 0),
6090 unsignedp),
6091 label_rtx (node->code_label), unsignedp);
6093 if (node->right != 0 && node->left != 0)
6095 /* This node has children on both sides.
6096 Dispatch to one side or the other
6097 by comparing the index value with this node's value.
6098 If one subtree is bounded, check that one first,
6099 so we can avoid real branches in the tree. */
6101 if (node_is_bounded (node->right, index_type))
6103 emit_cmp_and_jump_insns (index,
6104 convert_modes
6105 (mode, imode,
6106 expand_expr (node->high, NULL_RTX,
6107 VOIDmode, 0),
6108 unsignedp),
6109 GT, NULL_RTX, mode, unsignedp,
6110 label_rtx (node->right->code_label));
6111 emit_case_nodes (index, node->left, default_label, index_type);
6114 else if (node_is_bounded (node->left, index_type))
6116 emit_cmp_and_jump_insns (index,
6117 convert_modes
6118 (mode, imode,
6119 expand_expr (node->high, NULL_RTX,
6120 VOIDmode, 0),
6121 unsignedp),
6122 LT, NULL_RTX, mode, unsignedp,
6123 label_rtx (node->left->code_label));
6124 emit_case_nodes (index, node->right, default_label, index_type);
6127 else
6129 /* Neither node is bounded. First distinguish the two sides;
6130 then emit the code for one side at a time. */
6132 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6134 /* See if the value is on the right. */
6135 emit_cmp_and_jump_insns (index,
6136 convert_modes
6137 (mode, imode,
6138 expand_expr (node->high, NULL_RTX,
6139 VOIDmode, 0),
6140 unsignedp),
6141 GT, NULL_RTX, mode, unsignedp,
6142 label_rtx (test_label));
6144 /* Value must be on the left.
6145 Handle the left-hand subtree. */
6146 emit_case_nodes (index, node->left, default_label, index_type);
6147 /* If left-hand subtree does nothing,
6148 go to default. */
6149 emit_jump_if_reachable (default_label);
6151 /* Code branches here for the right-hand subtree. */
6152 expand_label (test_label);
6153 emit_case_nodes (index, node->right, default_label, index_type);
6157 else if (node->right != 0 && node->left == 0)
6159 /* Here we have a right child but no left, so we issue a conditional
6160 branch to default and process the right child.
6162 Omit the conditional branch to default if the right child is a
6163 single value; it costs too much space to save so little time. */
6165 if (node->right->right || node->right->left
6166 || !tree_int_cst_equal (node->right->low, node->right->high))
6168 if (!node_has_low_bound (node, index_type))
6170 emit_cmp_and_jump_insns (index,
6171 convert_modes
6172 (mode, imode,
6173 expand_expr (node->high, NULL_RTX,
6174 VOIDmode, 0),
6175 unsignedp),
6176 LT, NULL_RTX, mode, unsignedp,
6177 default_label);
6180 emit_case_nodes (index, node->right, default_label, index_type);
6182 else
6183 /* We cannot process node->right normally
6184 since we haven't ruled out the numbers less than
6185 this node's value. So handle node->right explicitly. */
6186 do_jump_if_equal (index,
6187 convert_modes
6188 (mode, imode,
6189 expand_expr (node->right->low, NULL_RTX,
6190 VOIDmode, 0),
6191 unsignedp),
6192 label_rtx (node->right->code_label), unsignedp);
6195 else if (node->right == 0 && node->left != 0)
6197 /* Just one subtree, on the left. */
6198 if (node->left->left || node->left->right
6199 || !tree_int_cst_equal (node->left->low, node->left->high))
6201 if (!node_has_high_bound (node, index_type))
6203 emit_cmp_and_jump_insns (index,
6204 convert_modes
6205 (mode, imode,
6206 expand_expr (node->high, NULL_RTX,
6207 VOIDmode, 0),
6208 unsignedp),
6209 GT, NULL_RTX, mode, unsignedp,
6210 default_label);
6213 emit_case_nodes (index, node->left, default_label, index_type);
6215 else
6216 /* We cannot process node->left normally
6217 since we haven't ruled out the numbers greater than
6218 this node's value. So handle node->left explicitly. */
6219 do_jump_if_equal (index,
6220 convert_modes
6221 (mode, imode,
6222 expand_expr (node->left->low, NULL_RTX,
6223 VOIDmode, 0),
6224 unsignedp),
6225 label_rtx (node->left->code_label), unsignedp);
6228 else
6230 /* Node is a range. These cases are very similar to those for a single
6231 value, except that we do not start by testing whether this node
6232 is the one to branch to. */
6234 if (node->right != 0 && node->left != 0)
6236 /* Node has subtrees on both sides.
6237 If the right-hand subtree is bounded,
6238 test for it first, since we can go straight there.
6239 Otherwise, we need to make a branch in the control structure,
6240 then handle the two subtrees. */
6241 tree test_label = 0;
6243 if (node_is_bounded (node->right, index_type))
6244 /* Right hand node is fully bounded so we can eliminate any
6245 testing and branch directly to the target code. */
6246 emit_cmp_and_jump_insns (index,
6247 convert_modes
6248 (mode, imode,
6249 expand_expr (node->high, NULL_RTX,
6250 VOIDmode, 0),
6251 unsignedp),
6252 GT, NULL_RTX, mode, unsignedp,
6253 label_rtx (node->right->code_label));
6254 else
6256 /* Right hand node requires testing.
6257 Branch to a label where we will handle it later. */
6259 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6260 emit_cmp_and_jump_insns (index,
6261 convert_modes
6262 (mode, imode,
6263 expand_expr (node->high, NULL_RTX,
6264 VOIDmode, 0),
6265 unsignedp),
6266 GT, NULL_RTX, mode, unsignedp,
6267 label_rtx (test_label));
6270 /* Value belongs to this node or to the left-hand subtree. */
6272 emit_cmp_and_jump_insns (index,
6273 convert_modes
6274 (mode, imode,
6275 expand_expr (node->low, NULL_RTX,
6276 VOIDmode, 0),
6277 unsignedp),
6278 GE, NULL_RTX, mode, unsignedp,
6279 label_rtx (node->code_label));
6281 /* Handle the left-hand subtree. */
6282 emit_case_nodes (index, node->left, default_label, index_type);
6284 /* If right node had to be handled later, do that now. */
6286 if (test_label)
6288 /* If the left-hand subtree fell through,
6289 don't let it fall into the right-hand subtree. */
6290 emit_jump_if_reachable (default_label);
6292 expand_label (test_label);
6293 emit_case_nodes (index, node->right, default_label, index_type);
6297 else if (node->right != 0 && node->left == 0)
6299 /* Deal with values to the left of this node,
6300 if they are possible. */
6301 if (!node_has_low_bound (node, index_type))
6303 emit_cmp_and_jump_insns (index,
6304 convert_modes
6305 (mode, imode,
6306 expand_expr (node->low, NULL_RTX,
6307 VOIDmode, 0),
6308 unsignedp),
6309 LT, NULL_RTX, mode, unsignedp,
6310 default_label);
6313 /* Value belongs to this node or to the right-hand subtree. */
6315 emit_cmp_and_jump_insns (index,
6316 convert_modes
6317 (mode, imode,
6318 expand_expr (node->high, NULL_RTX,
6319 VOIDmode, 0),
6320 unsignedp),
6321 LE, NULL_RTX, mode, unsignedp,
6322 label_rtx (node->code_label));
6324 emit_case_nodes (index, node->right, default_label, index_type);
6327 else if (node->right == 0 && node->left != 0)
6329 /* Deal with values to the right of this node,
6330 if they are possible. */
6331 if (!node_has_high_bound (node, index_type))
6333 emit_cmp_and_jump_insns (index,
6334 convert_modes
6335 (mode, imode,
6336 expand_expr (node->high, NULL_RTX,
6337 VOIDmode, 0),
6338 unsignedp),
6339 GT, NULL_RTX, mode, unsignedp,
6340 default_label);
6343 /* Value belongs to this node or to the left-hand subtree. */
6345 emit_cmp_and_jump_insns (index,
6346 convert_modes
6347 (mode, imode,
6348 expand_expr (node->low, NULL_RTX,
6349 VOIDmode, 0),
6350 unsignedp),
6351 GE, NULL_RTX, mode, unsignedp,
6352 label_rtx (node->code_label));
6354 emit_case_nodes (index, node->left, default_label, index_type);
6357 else
6359 /* Node has no children so we check low and high bounds to remove
6360 redundant tests. Only one of the bounds can exist,
6361 since otherwise this node is bounded--a case tested already. */
6362 int high_bound = node_has_high_bound (node, index_type);
6363 int low_bound = node_has_low_bound (node, index_type);
6365 if (!high_bound && low_bound)
6367 emit_cmp_and_jump_insns (index,
6368 convert_modes
6369 (mode, imode,
6370 expand_expr (node->high, NULL_RTX,
6371 VOIDmode, 0),
6372 unsignedp),
6373 GT, NULL_RTX, mode, unsignedp,
6374 default_label);
6377 else if (!low_bound && high_bound)
6379 emit_cmp_and_jump_insns (index,
6380 convert_modes
6381 (mode, imode,
6382 expand_expr (node->low, NULL_RTX,
6383 VOIDmode, 0),
6384 unsignedp),
6385 LT, NULL_RTX, mode, unsignedp,
6386 default_label);
6388 else if (!low_bound && !high_bound)
6390 /* Widen LOW and HIGH to the same width as INDEX. */
6391 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
6392 tree low = build1 (CONVERT_EXPR, type, node->low);
6393 tree high = build1 (CONVERT_EXPR, type, node->high);
6394 rtx low_rtx, new_index, new_bound;
6396 /* Instead of doing two branches, emit one unsigned branch for
6397 (index-low) > (high-low). */
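/* E.g. for `case 4 ... 7' this emits the single test

       if ((unsigned) (index - 4) > 7 - 4)
         goto default;

   instead of separate `index < 4' and `index > 7' branches: an index
   below 4 wraps around to a huge unsigned value and fails the test.  */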
6398 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
6399 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
6400 NULL_RTX, unsignedp,
6401 OPTAB_WIDEN);
6402 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
6403 high, low)),
6404 NULL_RTX, mode, 0);
6406 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
6407 mode, 1, default_label);
6410 emit_jump (label_rtx (node->code_label));
6415 #include "gt-stmt.h"