/* Expands front end tree to back end RTL for GCC
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
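/* For illustration only, not code in this file: a front end expanding
   `if (c) s1; else s2;' would make a call sequence along these lines,
   a sketch assuming the usual companion entry points (expand_start_else
   and expand_end_cond appear beyond this excerpt):

     expand_start_cond (c, 0);
     ... expand s1 ...
     expand_start_else ();
     ... expand s2 ...
     expand_end_cond ();
*/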
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "langhooks.h"
#include "predict.h"
#include "optabs.h"
#include "target.h"

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
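/* For illustration only: given

     switch (x) { case 1: ...; case 3 ... 5: ...; }

   (the second label using the GNU case-range extension), two case nodes
   are built: one with low == high == 1, and one with low == 3, high == 5.
   A dense label set such as 1, 2, 3, 4 favors the branch table; a sparse
   set such as 1, 1000, 100000 favors the compare-and-jump tree.  */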
struct case_node GTY(())
{
  struct case_node *left;   /* Left son in binary tree */
  struct case_node *right;  /* Right son in binary tree; also node chain */
  struct case_node *parent; /* Parent of node in binary tree */
  tree low;		    /* Lowest index value for this label */
  tree high;		    /* Highest index value for this label */
  tree code_label;	    /* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
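/* For illustration only: the +1 bias in COST_TABLE means COST_TABLE (-1)
   refers to cost_table_[0] and COST_TABLE (127) to cost_table_[128], so
   the 129 entries cover the index range -1 .. 127 even though the
   subscript arithmetic is done in an unsigned type.  */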
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
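/* For illustration only: while expanding `while (c) { if (d) ... }',
   nesting_stack chains the inner COND_NESTING entry to the LOOP_NESTING
   entry through `all', while cond_stack and loop_stack each point at
   their own entry and link onward through `next'.  A C `break' in the
   if-body is resolved against loop_stack, not cond_stack, since only
   the loop is visible to it.  */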
struct nesting GTY(())
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  enum nesting_desc {
    COND_NESTING,
    LOOP_NESTING,
    BLOCK_NESTING,
    CASE_NESTING
  } desc;
  union nesting_u
    {
      /* For conds (if-then and if-then-else statements).  */
      struct nesting_cond
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} GTY ((tag ("COND_NESTING"))) cond;
      /* For loops.  */
      struct nesting_loop
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} GTY ((tag ("LOOP_NESTING"))) loop;
      /* For variable binding contours.  */
      struct nesting_block
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block; if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	} GTY ((tag ("BLOCK_NESTING"))) block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct nesting_case
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} GTY ((tag ("CASE_NESTING"))) case_stmt;
    } GTY ((desc ("%1.desc"))) data;
};
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() ggc_alloc (sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)						\
do { struct nesting *target = STACK;				\
     struct nesting *this;					\
     do { this = nesting_stack;					\
	  if (loop_stack == this)				\
	    loop_stack = loop_stack->next;			\
	  if (cond_stack == this)				\
	    cond_stack = cond_stack->next;			\
	  if (block_stack == this)				\
	    block_stack = block_stack->next;			\
	  if (stack_block_stack == this)			\
	    stack_block_stack = stack_block_stack->next;	\
	  if (case_stack == this)				\
	    case_stack = case_stack->next;			\
	  nesting_depth = nesting_stack->depth - 1;		\
	  nesting_stack = this->all; }				\
     while (this != target); } while (0)
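/* For illustration only: `POPSTACK (loop_stack);' unwinds nesting_stack
   down to and including the entry that loop_stack points at, keeping
   every per-construct stack consistent along the way; this is how the
   various expand_end_* functions discard the state of the construct
   they are closing.  */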
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
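/* For illustration only:

     {
       char buf[n];	-- variable-sized, so this contour resets the stack
       ...
       goto out;	-- forward goto; `out' is not defined yet
     }
   out: ;

   The jump must restore the stack level saved on entry to the block, but
   the right code cannot be emitted until `out' is seen, so expand_goto
   queues a fixup that is finished when the contour is exited or the label
   is finally defined.  */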
struct goto_fixup GTY(())
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain GTY(())
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};

struct stmt_status GTY(())
{
  /* Chain of all pending binding contours.  */
  struct nesting * x_block_stack;

  /* If any new stacks are added here, add them to POPSTACKS too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting * x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting * x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting * x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting * x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting * x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Location of last line-number note, whether we actually
     emitted it or not.  */
  location_t x_emit_locus;

  struct goto_fixup *x_goto_fixup_chain;
};

#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define loop_stack (cfun->stmt->x_loop_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_locus (cfun->stmt->x_emit_locus)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)

/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

static int n_occurrences (int, const char *);
static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
static void expand_goto_internal (tree, rtx, rtx);
static int expand_fixup (tree, rtx, rtx);
static rtx expand_nl_handler_label (rtx, rtx);
static void expand_nl_goto_receiver (void);
static void expand_nl_goto_receivers (struct nesting *);
static void fixup_gotos (struct nesting *, rtx, tree, rtx, int);
static bool check_operand_nalternatives (tree, tree);
static bool check_unique_operand_names (tree, tree);
static char *resolve_operand_name_1 (char *, tree, tree);
static void expand_null_return_1 (rtx);
static enum br_predictor return_prediction (rtx);
static rtx shift_return_value (rtx);
static void expand_value_return (rtx);
static int tail_recursion_args (tree, tree);
static void expand_cleanups (tree, int, int);
static void check_seenlabel (void);
static void do_jump_if_equal (rtx, rtx, rtx, int);
static int estimate_case_costs (case_node_ptr);
static bool same_case_target_p (rtx, rtx);
static void strip_default_case_nodes (case_node_ptr *, rtx);
static bool lshift_cheap_p (void);
static int case_bit_test_cmp (const void *, const void *);
static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
static void group_case_nodes (case_node_ptr);
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
static void emit_jump_if_reachable (rtx);
static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
static struct case_node *case_tree2list (case_node *, case_node *);

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt_for_function (void)
{
  cfun->stmt = ggc_alloc_cleared (sizeof (struct stmt_status));
}

/* Record the current file and line.  Called from emit_line_note.  */

void
set_file_and_line_for_stmt (location_t location)
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    emit_locus = location;
}

/* Emit a no-op instruction.  */

void
emit_nop (void)
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}

/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (tree label)
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (!DECL_RTL_SET_P (label))
    SET_DECL_RTL (label, gen_label_rtx ());

  return DECL_RTL (label);
}

/* As above, but also put it on the forced-reference list of the
   function that contains it.  */
rtx
force_label_rtx (tree label)
{
  rtx ref = label_rtx (label);
  tree function = decl_function_context (label);
  struct function *p;

  if (!function)
    abort ();

  if (function != current_function_decl
      && function != inline_function_decl)
    p = find_function_data (function);
  else
    p = cfun;

  p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
						p->expr->x_forced_labels);
  return ref;
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (rtx label)
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (tree exp)
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

  x = convert_memory_address (Pmode, x);

  emit_queue ();

  if (! cfun->computed_goto_common_label)
    {
      cfun->computed_goto_common_reg = copy_to_mode_reg (Pmode, x);
      cfun->computed_goto_common_label = gen_label_rtx ();
      emit_label (cfun->computed_goto_common_label);

      do_pending_stack_adjust ();
      emit_indirect_jump (cfun->computed_goto_common_reg);

      current_function_has_computed_jump = 1;
    }
  else
    {
      emit_move_insn (cfun->computed_goto_common_reg, x);
      emit_jump (cfun->computed_goto_common_label);
    }
}
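/* For illustration only: this expander is behind the GNU C computed goto,
   as in

     void *tab[] = { &&lab1, &&lab2 };
     goto *tab[i];

   All computed gotos in a function funnel through one shared register and
   one shared indirect-jump label: each `goto *exp' merely loads the
   register and jumps to the common label, which keeps the number of
   potential control-flow edges down (one indirect jump per function
   instead of one per goto).  */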
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (tree label)
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = ggc_alloc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (tree label)
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}
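/* For illustration only: the nonlocal-goto machinery above and in
   expand_goto below supports GNU C nested functions, roughly as in

     void f (int n)
     {
       __label__ out;
       void g (void) { goto out; }	-- nonlocal goto to F's label
       ... use g ...
       out: ;
     }

   F saves its stack level and allocates a handler slot for each such
   label; a goto in G restores F's frame and stack and enters through
   the handler.  */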
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (tree label)
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx handler_slot, static_chain, save_area, insn;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->x_nonlocal_goto_handler_slots;
      for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
	   link = TREE_CHAIN (link))
	handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

      static_chain = copy_to_reg (lookup_static_chain (label));

      /* Get addr of containing function's current nonlocal goto handler,
	 which will do any cleanups and then jump to the label.  */
      handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
					       virtual_stack_vars_rtx,
					       static_chain));

      /* Get addr of containing function's nonlocal save area.  */
      save_area = p->x_nonlocal_goto_stack_level;
      if (save_area)
	save_area = replace_rtx (copy_rtx (save_area),
				 virtual_stack_vars_rtx, static_chain);

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
				      save_area, label_ref));
      else
#endif
	{
	  emit_insn (gen_rtx_CLOBBER (VOIDmode,
				      gen_rtx_MEM (BLKmode,
						   gen_rtx_SCRATCH (VOIDmode))));
	  emit_insn (gen_rtx_CLOBBER (VOIDmode,
				      gen_rtx_MEM (BLKmode,
						   hard_frame_pointer_rtx)));

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, static_chain);
	  emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);

	  /* USE of hard_frame_pointer_rtx added for consistency;
	     not clear if really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (handler_slot);
	}

      /* Search backwards to the jump insn and mark it as a
	 non-local goto.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	{
	  if (GET_CODE (insn) == JUMP_INSN)
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
						  const0_rtx, REG_NOTES (insn));
	      break;
	    }
	  else if (GET_CODE (insn) == CALL_INSN)
	    break;
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (tree body, rtx label, rtx last_insn)
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();

	  /* Don't do this adjust if it's to the end label and this function
	     is to return with a depressed stack pointer.  */
	  if (label == return_label
	      && (((TREE_CODE (TREE_TYPE (current_function_decl))
		    == FUNCTION_TYPE)
		   && (TYPE_RETURNS_STACK_DEPRESSED
		       (TREE_TYPE (current_function_decl))))))
	    ;
	  else
	    emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}

/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree tree_label, rtx rtl_label, rtx last_insn)
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup = ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes (including expand_end_loop)
	 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
	 as a placeholder.  */

      {
	rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();
	rtx start;
	rtx end;
	tree block;

	block = make_node (BLOCK);
	TREE_USED (block) = 1;

	if (!cfun->x_whole_function_mode_p)
	  (*lang_hooks.decls.insert_block) (block);
	else
	  {
	    BLOCK_CHAIN (block)
	      = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
	    BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
	      = block;
	  }

	start_sequence ();
	start = emit_note (NOTE_INSN_BLOCK_BEG);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (start) = block;
	fixup->before_jump = emit_note (NOTE_INSN_DELETED);
	end = emit_note (NOTE_INSN_BLOCK_END);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (end) = block;
	fixup->context = block;
	end_sequence ();
	emit_insn_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (rtx first_insn)
{
  fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN positive means report error if there is a jump into this
   contour from before the beginning of the contour.  This is also done if
   STACK_LEVEL is nonzero unless DONT_JUMP_IN is negative.  */

static void
fixup_gotos (struct nesting *thisblock, rtx stack_level,
	     tree cleanup_list, rtx first_insn, int dont_jump_in)
{
  struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  rtx cleanup_insns;

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  This code used to use
	     the first non-label insn after f->target_rtl, but that's
	     wrong since such insns can be added by things like
	     put_var_into_stack and can have INSN_UIDs that are out of
	     the range of the block.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in > 0 || (dont_jump_in == 0 && stack_level)
		  || cleanup_list)
	      && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error ("%Jlabel '%D' used before containing binding contour",
		     f->target, f->target);
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  (*lang_hooks.decls.pushlevel) (0);
	  (*lang_hooks.decls.set_block) (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level
	      && ! (f->target_rtl == return_label
		    && ((TREE_CODE (TREE_TYPE (current_function_decl))
			 == FUNCTION_TYPE)
			&& (TYPE_RETURNS_STACK_DEPRESSED
			    (TREE_TYPE (current_function_decl))))))
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  (*lang_hooks.decls.poplevel) (1, 0, 0);

	  end_sequence ();
	  emit_insn_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      (*lang_hooks.decls.pushlevel) (0);
	      (*lang_hooks.decls.set_block) (f->context);
	      expand_cleanups (TREE_VALUE (lists), 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      (*lang_hooks.decls.poplevel) (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insn_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}

/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

void
expand_asm (tree string, int vol)
{
  rtx body;

  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));

  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);

  clear_last_expr ();
}
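/* For illustration only: expand_asm above handles the operand-less form
   of the asm extension, e.g.

     asm volatile ("nop");

   which becomes a single ASM_INPUT rtx with MEM_VOLATILE_P set.  The
   operand-carrying form `asm ("..." : outputs : inputs : clobbers)' is
   handled by expand_asm_operands below.  */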
/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */
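/* For illustration only: for the constraint "+rm" this function sets
   *IS_INOUT (`+' marks a read-write operand), sets both *ALLOWS_REG and
   *ALLOWS_MEM (from `r' and `m'), and replaces the string with its
   canonical form "=rm".  */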
bool
parse_output_constraint (const char **constraint_p, int operand_num,
			 int ninputs, int noutputs, bool *allows_mem,
			 bool *allows_reg, bool *is_inout)
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks `='");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
	warning ("output constraint `%c' for operand %d is not at the beginning",
		 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
	 it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string.  */
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
    switch (*p)
      {
      case '+':
      case '=':
	error ("operand constraint contains incorrectly positioned '+' or '='");
	return false;

      case '%':
	if (operand_num + 1 == ninputs + noutputs)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
	error ("matching constraint not valid in output operand");
	return false;

      case '<':  case '>':
	/* ??? Before flow, auto inc/dec insns are not supposed to exist,
	   excepting those that expand_call created.  So match memory
	   and hope.  */
	*allows_mem = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      default:
	if (!ISALPHA (*p))
	  break;
	if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  if (*is_inout && !*allows_reg)
    warning ("read-write constraint does not allow a register");

  return true;
}
/* Similar, but for input constraints.  */

bool
parse_input_constraint (const char **constraint_p, int input_num,
			int ninputs, int noutputs, int ninout,
			const char * const * constraints,
			bool *allows_mem, bool *allows_reg)
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;
  bool saw_match = false;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
	if (constraint == orig_constraint)
	  {
	    error ("input operand constraint contains `%c'", constraint[j]);
	    return false;
	  }
	break;

      case '%':
	if (constraint == orig_constraint
	    && input_num + 1 == ninputs - ninout)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

	/* Whether or not a numeric constraint allows a register is
	   decided by the matching constraint, and so there is no need
	   to do anything special with them.  We must handle them in
	   the default case, so that we don't unnecessarily force
	   operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
	{
	  char *end;
	  unsigned long match;

	  saw_match = true;

	  match = strtoul (constraint + j, &end, 10);
	  if (match >= (unsigned long) noutputs)
	    {
	      error ("matching constraint references invalid operand number");
	      return false;
	    }

	  /* Try and find the real constraint for this dup.  Only do this
	     if the matching constraint is the only alternative.  */
	  if (*end == '\0'
	      && (j == 0 || (j == 1 && constraint[0] == '%')))
	    {
	      constraint = constraints[match];
	      *constraint_p = constraint;
	      c_len = strlen (constraint);
	      j = 0;
	      /* ??? At the end of the loop, we will skip the first part of
		 the matched constraint.  This assumes not only that the
		 other constraint is an output constraint, but also that
		 the '=' or '+' come first.  */
	      break;
	    }
	  else
	    j = end - constraint;
	  /* Anticipate increment at end of loop.  */
	  j--;
	}
	/* Fall through.  */

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      default:
	if (! ISALPHA (constraint[j]))
	  {
	    error ("invalid punctuation `%c' in constraint", constraint[j]);
	    return false;
	  }
	if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
	    != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  if (saw_match && !*allows_reg)
    warning ("matching constraint does not allow a register");

  return true;
}
/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in DECL.  Emit error and return TRUE for error,
   FALSE for ok.  */
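/* For illustration only: given a register asm variable such as

     register int x asm ("r1");	-- "r1" is a placeholder register name

   using X as an operand of an asm whose clobber list also names "r1" is
   rejected here, since the asm would destroy the very register X is
   declared to live in.  */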
static bool
decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
      && DECL_REGISTER (decl)
      && REG_P (DECL_RTL (decl))
      && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
    {
      rtx reg = DECL_RTL (decl);
      unsigned int regno;

      for (regno = REGNO (reg);
	   regno < (REGNO (reg)
		    + HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)));
	   regno++)
	if (TEST_HARD_REG_BIT (clobbered_regs, regno))
	  {
	    error ("asm-specifier for variable `%s' conflicts with asm clobber list",
		   IDENTIFIER_POINTER (DECL_NAME (decl)));

	    /* Reset registerness to stop multiple errors emitted for a
	       single variable.  */
	    DECL_REGISTER (decl) = 0;
	    return true;
	  }
    }

  return false;
}
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
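/* For illustration only: the source form expanded here is GCC's extended
   asm, e.g.

     asm volatile ("add %1, %0"
		   : "=r" (sum)			-- OUTPUTS
		   : "r" (x), "0" (sum)		-- INPUTS
		   : "cc");			-- CLOBBERS

   The constraint strings are the ones parse_output_constraint and
   parse_input_constraint pick apart above; the template itself is a
   made-up two-operand add, since operand syntax varies by target.  */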
1451 void
1452 expand_asm_operands (tree string, tree outputs, tree inputs,
1453 tree clobbers, int vol, location_t locus)
1455 rtvec argvec, constraintvec;
1456 rtx body;
1457 int ninputs = list_length (inputs);
1458 int noutputs = list_length (outputs);
1459 int ninout;
1460 int nclobbers;
1461 HARD_REG_SET clobbered_regs;
1462 int clobber_conflict_found = 0;
1463 tree tail;
1464 tree t;
1465 int i;
1466 /* Vector of RTX's of evaluated output operands. */
1467 rtx *output_rtx = alloca (noutputs * sizeof (rtx));
1468 int *inout_opnum = alloca (noutputs * sizeof (int));
1469 rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
1470 enum machine_mode *inout_mode
1471 = alloca (noutputs * sizeof (enum machine_mode));
1472 const char **constraints
1473 = alloca ((noutputs + ninputs) * sizeof (const char *));
1474 int old_generating_concat_p = generating_concat_p;
1476 /* An ASM with no outputs needs to be treated as volatile, for now. */
1477 if (noutputs == 0)
1478 vol = 1;
1480 if (! check_operand_nalternatives (outputs, inputs))
1481 return;
1483 string = resolve_asm_operand_names (string, outputs, inputs);
1485 /* Collect constraints. */
1486 i = 0;
1487 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
1488 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1489 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
1490 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1492 #ifdef MD_ASM_CLOBBERS
1493 /* Sometimes we wish to automatically clobber registers across an asm.
1494 Case in point is when the i386 backend moved from cc0 to a hard reg --
1495 maintaining source-level compatibility means automatically clobbering
1496 the flags register. */
1497 MD_ASM_CLOBBERS (clobbers);
1498 #endif
1500 /* Count the number of meaningful clobbered registers, ignoring what
1501 we would ignore later. */
1502 nclobbers = 0;
1503 CLEAR_HARD_REG_SET (clobbered_regs);
1504 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1506 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1508 i = decode_reg_name (regname);
1509 if (i >= 0 || i == -4)
1510 ++nclobbers;
1511 else if (i == -2)
1512 error ("unknown register name `%s' in `asm'", regname);
1514 /* Mark clobbered registers. */
1515 if (i >= 0)
1517 /* Clobbering the PIC register is an error */
1518 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
1520 error ("PIC register `%s' clobbered in `asm'", regname);
1521 return;
1524 SET_HARD_REG_BIT (clobbered_regs, i);
1528 clear_last_expr ();
1530 /* First pass over inputs and outputs checks validity and sets
1531 mark_addressable if needed. */
1533 ninout = 0;
1534 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1536 tree val = TREE_VALUE (tail);
1537 tree type = TREE_TYPE (val);
1538 const char *constraint;
1539 bool is_inout;
1540 bool allows_reg;
1541 bool allows_mem;
1543 /* If there's an erroneous arg, emit no insn. */
1544 if (type == error_mark_node)
1545 return;
1547 /* Try to parse the output constraint. If that fails, there's
1548 no point in going further. */
1549 constraint = constraints[i];
1550 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
1551 &allows_mem, &allows_reg, &is_inout))
1552 return;
1554 if (! allows_reg
1555 && (allows_mem
1556 || is_inout
1557 || (DECL_P (val)
1558 && GET_CODE (DECL_RTL (val)) == REG
1559 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
1560 (*lang_hooks.mark_addressable) (val);
1562 if (is_inout)
1563 ninout++;
1566 ninputs += ninout;
1567 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1569 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1570 return;
1573 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
1575 bool allows_reg, allows_mem;
1576 const char *constraint;
1578 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
1579 would get VOIDmode and that could cause a crash in reload. */
1580 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1581 return;
1583 constraint = constraints[i + noutputs];
1584 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1585 constraints, &allows_mem, &allows_reg))
1586 return;
1588 if (! allows_reg && allows_mem)
1589 (*lang_hooks.mark_addressable) (TREE_VALUE (tail));
1592 /* Second pass evaluates arguments. */
1594 ninout = 0;
1595 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1597 tree val = TREE_VALUE (tail);
1598 tree type = TREE_TYPE (val);
1599 bool is_inout;
1600 bool allows_reg;
1601 bool allows_mem;
1602 rtx op;
1604 if (!parse_output_constraint (&constraints[i], i, ninputs,
1605 noutputs, &allows_mem, &allows_reg,
1606 &is_inout))
1607 abort ();
1609 /* If an output operand is not a decl or indirect ref and our constraint
1610 allows a register, make a temporary to act as an intermediate.
1611 Make the asm insn write into that, then our caller will copy it to
1612 the real output operand. Likewise for promoted variables. */
1614 generating_concat_p = 0;
1616 real_output_rtx[i] = NULL_RTX;
1617 if ((TREE_CODE (val) == INDIRECT_REF
1618 && allows_mem)
1619 || (DECL_P (val)
1620 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
1621 && ! (GET_CODE (DECL_RTL (val)) == REG
1622 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1623 || ! allows_reg
1624 || is_inout)
1626 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
1627 if (GET_CODE (op) == MEM)
1628 op = validize_mem (op);
1630 if (! allows_reg && GET_CODE (op) != MEM)
1631 error ("output number %d not directly addressable", i);
1632 if ((! allows_mem && GET_CODE (op) == MEM)
1633 || GET_CODE (op) == CONCAT)
1635 real_output_rtx[i] = protect_from_queue (op, 1);
1636 op = gen_reg_rtx (GET_MODE (op));
1637 if (is_inout)
1638 emit_move_insn (op, real_output_rtx[i]);
1641 else
1643 op = assign_temp (type, 0, 0, 1);
1644 op = validize_mem (op);
1645 TREE_VALUE (tail) = make_tree (type, op);
1647 output_rtx[i] = op;
1649 generating_concat_p = old_generating_concat_p;
1651 if (is_inout)
1653 inout_mode[ninout] = TYPE_MODE (type);
1654 inout_opnum[ninout++] = i;
1657 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1658 clobber_conflict_found = 1;
1661 /* Make vectors for the expression-rtx, constraint strings,
1662 and named operands. */
1664 argvec = rtvec_alloc (ninputs);
1665 constraintvec = rtvec_alloc (ninputs);
1667 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1668 : GET_MODE (output_rtx[0])),
1669 TREE_STRING_POINTER (string),
1670 empty_string, 0, argvec, constraintvec,
1671 locus.file, locus.line);
1673 MEM_VOLATILE_P (body) = vol;
1675 /* Eval the inputs and put them into ARGVEC.
1676 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1678 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1680 bool allows_reg, allows_mem;
1681 const char *constraint;
1682 tree val, type;
1683 rtx op;
1685 constraint = constraints[i + noutputs];
1686 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1687 constraints, &allows_mem, &allows_reg))
1688 abort ();
1690 generating_concat_p = 0;
1692 val = TREE_VALUE (tail);
1693 type = TREE_TYPE (val);
1694 op = expand_expr (val, NULL_RTX, VOIDmode,
1695 (allows_mem && !allows_reg
1696 ? EXPAND_MEMORY : EXPAND_NORMAL));
1698 /* Never pass a CONCAT to an ASM. */
1699 if (GET_CODE (op) == CONCAT)
1700 op = force_reg (GET_MODE (op), op);
1701 else if (GET_CODE (op) == MEM)
1702 op = validize_mem (op);
1704 if (asm_operand_ok (op, constraint) <= 0)
1706 if (allows_reg)
1707 op = force_reg (TYPE_MODE (type), op);
1708 else if (!allows_mem)
1709 warning ("asm operand %d probably doesn't match constraints",
1710 i + noutputs);
1711 else if (GET_CODE (op) == MEM)
1713 /* We won't recognize either volatile memory or memory
1714 with a queued address as available a memory_operand
1715 at this point. Ignore it: clearly this *is* a memory. */
1717 else
1719 warning ("use of memory input without lvalue in "
1720 "asm operand %d is deprecated", i + noutputs);
1722 if (CONSTANT_P (op))
1724 rtx mem = force_const_mem (TYPE_MODE (type), op);
1725 if (mem)
1726 op = validize_mem (mem);
1727 else
1728 op = force_reg (TYPE_MODE (type), op);
1730 if (GET_CODE (op) == REG
1731 || GET_CODE (op) == SUBREG
1732 || GET_CODE (op) == ADDRESSOF
1733 || GET_CODE (op) == CONCAT)
1735 tree qual_type = build_qualified_type (type,
1736 (TYPE_QUALS (type)
1737 | TYPE_QUAL_CONST));
1738 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1739 memloc = validize_mem (memloc);
1740 emit_move_insn (memloc, op);
1741 op = memloc;
1746 generating_concat_p = old_generating_concat_p;
1747 ASM_OPERANDS_INPUT (body, i) = op;
1749 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1750 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1752 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1753 clobber_conflict_found = 1;
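/* Illustrative examples of the input cases above (assumed, not from
   the original source):

       asm ("fldl %0" : : "m" (3.14));    warned as a deprecated memory
                                          input without an lvalue, then
                                          the constant is spilled to the
                                          constant pool by force_const_mem

       asm ("" : : "m" (x + 1));          the value lives in a register,
                                          so it is copied into a fresh
                                          const-qualified stack temporary
                                          to produce a genuine MEM

   Ordinary lvalue "m" inputs and register inputs pass through with at
   most a validize_mem or force_reg.  */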
1756 /* Protect all the operands from the queue now that they have all been
1757 evaluated. */
1759 generating_concat_p = 0;
1761 for (i = 0; i < ninputs - ninout; i++)
1762 ASM_OPERANDS_INPUT (body, i)
1763 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1765 for (i = 0; i < noutputs; i++)
1766 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1768 /* For in-out operands, copy output rtx to input rtx. */
1769 for (i = 0; i < ninout; i++)
1771 int j = inout_opnum[i];
1772 char buffer[16];
1774 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1775 = output_rtx[j];
1777 sprintf (buffer, "%d", j);
1778 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1779 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
1782 generating_concat_p = old_generating_concat_p;
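/* Illustrative example (assumed, not from the original source): for

       int x;
       asm ("incl %0" : "+r" (x));

   the "+r" operand was split earlier into output 0 with constraint
   "=r" plus a hidden input; the loop above copies output_rtx[0] into
   that input slot and gives it the matching constraint string "0"
   produced by the sprintf call.  */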
1784 /* Now, for each output, construct an rtx
1785 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1786 ARGVEC CONSTRAINTS OPNAMES))
1787 If there is more than one, put them inside a PARALLEL. */
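/* Rough shape of the result (illustrative; the exact RTL depends on
   the target):

       asm ("incl %0" : "=r" (x));

   with one output and no clobbers becomes

       (set (reg:SI x) (asm_operands:SI ("incl %0") ("=r") 0 ...))

   while two or more outputs, or any clobbers, wrap the SETs and
   CLOBBERs in a single PARALLEL.  */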
1789 if (noutputs == 1 && nclobbers == 0)
1791 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1792 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1795 else if (noutputs == 0 && nclobbers == 0)
1797 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1798 emit_insn (body);
1801 else
1803 rtx obody = body;
1804 int num = noutputs;
1806 if (num == 0)
1807 num = 1;
1809 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1811 /* For each output operand, store a SET. */
1812 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1814 XVECEXP (body, 0, i)
1815 = gen_rtx_SET (VOIDmode,
1816 output_rtx[i],
1817 gen_rtx_ASM_OPERANDS
1818 (GET_MODE (output_rtx[i]),
1819 TREE_STRING_POINTER (string),
1820 constraints[i], i, argvec, constraintvec,
1821 locus.file, locus.line));
1823 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1826 /* If there are no outputs (but there are some clobbers)
1827 store the bare ASM_OPERANDS into the PARALLEL. */
1829 if (i == 0)
1830 XVECEXP (body, 0, i++) = obody;
1832 /* Store (clobber REG) for each clobbered register specified. */
1834 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1836 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1837 int j = decode_reg_name (regname);
1838 rtx clobbered_reg;
1840 if (j < 0)
1842 if (j == -3) /* `cc', which is not a register */
1843 continue;
1845 if (j == -4) /* `memory', don't cache memory across asm */
1847 XVECEXP (body, 0, i++)
1848 = gen_rtx_CLOBBER (VOIDmode,
1849 gen_rtx_MEM
1850 (BLKmode,
1851 gen_rtx_SCRATCH (VOIDmode)));
1852 continue;
1855 /* Ignore unknown register, error already signaled. */
1856 continue;
1859 /* Use QImode since that's guaranteed to clobber just one reg. */
1860 clobbered_reg = gen_rtx_REG (QImode, j);
1862 /* Sanity check for overlap between clobbers and inputs or
1863 outputs that has not been handled. Such overlap
1864 should have been detected and reported above. */
1865 if (!clobber_conflict_found)
1867 int opno;
1869 /* We test the old body (obody) contents to avoid tripping
1870 over the under-construction body. */
1871 for (opno = 0; opno < noutputs; opno++)
1872 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1873 internal_error ("asm clobber conflict with output operand");
1875 for (opno = 0; opno < ninputs - ninout; opno++)
1876 if (reg_overlap_mentioned_p (clobbered_reg,
1877 ASM_OPERANDS_INPUT (obody, opno)))
1878 internal_error ("asm clobber conflict with input operand");
1881 XVECEXP (body, 0, i++)
1882 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
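/* Illustrative example (assumed, not from the original source):

       asm volatile ("" : : : "memory", "cc", "di");

   emits (clobber (mem:BLK (scratch))) for "memory"; "cc" is skipped
   when it is not a real register on the target (decode_reg_name
   returns -3); and a named hard register such as "di" on ia32 becomes
   (clobber (reg:QI di)).  */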
1885 emit_insn (body);
1888 /* For any outputs that needed reloading into registers, spill them
1889 back to where they belong. */
1890 for (i = 0; i < noutputs; ++i)
1891 if (real_output_rtx[i])
1892 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1894 free_temp_slots ();
1897 /* A subroutine of expand_asm_operands. Check that all operands have
1898 the same number of alternatives. Return true if so. */
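/* What "same number of alternatives" means (illustrative example):

       asm ("..." : "=r,m" (out) : "r,r" (in));

   both constraint strings contain one comma, i.e. two alternatives,
   so the check passes; pairing "=r,m" with a plain "r" would be
   rejected with the error below.  */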
1900 static bool
1901 check_operand_nalternatives (tree outputs, tree inputs)
1903 if (outputs || inputs)
1905 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1906 int nalternatives
1907 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1908 tree next = inputs;
1910 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1912 error ("too many alternatives in `asm'");
1913 return false;
1916 tmp = outputs;
1917 while (tmp)
1919 const char *constraint
1920 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1922 if (n_occurrences (',', constraint) != nalternatives)
1924 error ("operand constraints for `asm' differ in number of alternatives");
1925 return false;
1928 if (TREE_CHAIN (tmp))
1929 tmp = TREE_CHAIN (tmp);
1930 else
1931 tmp = next, next = 0;
1935 return true;
1938 /* A subroutine of expand_asm_operands. Check that all operand names
1939 are unique. Return true if so. We rely on the fact that these names
1940 are identifiers, and so have been canonicalized by get_identifier,
1941 so all we need are pointer comparisons. */
1943 static bool
1944 check_unique_operand_names (tree outputs, tree inputs)
1946 tree i, j;
1948 for (i = outputs; i ; i = TREE_CHAIN (i))
1950 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1951 if (! i_name)
1952 continue;
1954 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1955 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1956 goto failure;
1959 for (i = inputs; i ; i = TREE_CHAIN (i))
1961 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1962 if (! i_name)
1963 continue;
1965 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1966 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1967 goto failure;
1968 for (j = outputs; j ; j = TREE_CHAIN (j))
1969 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1970 goto failure;
1973 return true;
1975 failure:
1976 error ("duplicate asm operand name '%s'",
1977 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1978 return false;
1981 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1982 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1983 STRING and in the constraints to those numbers. */
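/* Illustrative example (assumed, not from the original source):

       asm ("mov %[src], %[dst]" : [dst] "=r" (d) : [src] "r" (s));

   is rewritten so that the template reads "mov %1, %0" (outputs are
   numbered before inputs), and any "[src]" appearing inside another
   operand's constraint string is replaced by "1" the same way.  */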
1985 tree
1986 resolve_asm_operand_names (tree string, tree outputs, tree inputs)
1988 char *buffer;
1989 char *p;
1990 const char *c;
1991 tree t;
1993 check_unique_operand_names (outputs, inputs);
1995 /* Substitute [<name>] in input constraint strings. There should be no
1996 named operands in output constraints. */
1997 for (t = inputs; t ; t = TREE_CHAIN (t))
1999 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2000 if (strchr (c, '[') != NULL)
2002 p = buffer = xstrdup (c);
2003 while ((p = strchr (p, '[')) != NULL)
2004 p = resolve_operand_name_1 (p, outputs, inputs);
2005 TREE_VALUE (TREE_PURPOSE (t))
2006 = build_string (strlen (buffer), buffer);
2007 free (buffer);
2011 /* Now check for any needed substitutions in the template. */
2012 c = TREE_STRING_POINTER (string);
2013 while ((c = strchr (c, '%')) != NULL)
2015 if (c[1] == '[')
2016 break;
2017 else if (ISALPHA (c[1]) && c[2] == '[')
2018 break;
2019 else
2021 c += 1;
2022 continue;
2026 if (c)
2028 /* OK, we need to make a copy so we can perform the substitutions.
2029 Assume that we will not need extra space--we get to remove '['
2030 and ']', which means we cannot have a problem until we have more
2031 than 999 operands. */
2032 buffer = xstrdup (TREE_STRING_POINTER (string));
2033 p = buffer + (c - TREE_STRING_POINTER (string));
2035 while ((p = strchr (p, '%')) != NULL)
2037 if (p[1] == '[')
2038 p += 1;
2039 else if (ISALPHA (p[1]) && p[2] == '[')
2040 p += 2;
2041 else
2043 p += 1;
2044 continue;
2047 p = resolve_operand_name_1 (p, outputs, inputs);
2050 string = build_string (strlen (buffer), buffer);
2051 free (buffer);
2054 return string;
2057 /* A subroutine of resolve_operand_names. P points to the '[' for a
2058 potential named operand of the form [<name>]. In place, replace
2059 the name and brackets with a number. Return a pointer to the
2060 balance of the string after substitution. */
2062 static char *
2063 resolve_operand_name_1 (char *p, tree outputs, tree inputs)
2065 char *q;
2066 int op;
2067 tree t;
2068 size_t len;
2070 /* Collect the operand name. */
2071 q = strchr (p, ']');
2072 if (!q)
2074 error ("missing close brace for named operand");
2075 return strchr (p, '\0');
2077 len = q - p - 1;
2079 /* Resolve the name to a number. */
2080 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2082 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2083 if (name)
2085 const char *c = TREE_STRING_POINTER (name);
2086 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2087 goto found;
2090 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2092 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2093 if (name)
2095 const char *c = TREE_STRING_POINTER (name);
2096 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2097 goto found;
2101 *q = '\0';
2102 error ("undefined named operand '%s'", p + 1);
2103 op = 0;
2104 found:
2106 /* Replace the name with the number. Unfortunately, not all libraries
2107 get the return value of sprintf correct, so search for the end of the
2108 generated string by hand. */
2109 sprintf (p, "%d", op);
2110 p = strchr (p, '\0');
2112 /* Verify the no extra buffer space assumption. */
2113 if (p > q)
2114 abort ();
2116 /* Shift the rest of the buffer down to fill the gap. */
2117 memmove (p, q + 1, strlen (q + 1) + 1);
2119 return p;
2122 /* Generate RTL to evaluate the expression EXP
2123 and remember it in case this is the VALUE in a ({... VALUE; }) constr.
2124 Provided just for backward-compatibility. expand_expr_stmt_value()
2125 should be used for new code. */
2127 void
2128 expand_expr_stmt (tree exp)
2130 expand_expr_stmt_value (exp, -1, 1);
2133 /* Generate RTL to evaluate the expression EXP. WANT_VALUE tells
2134 whether to (1) save the value of the expression, (0) discard it or
2135 (-1) use expr_stmts_for_value to tell. The use of -1 is
2136 deprecated, and retained only for backward compatibility. */
2138 void
2139 expand_expr_stmt_value (tree exp, int want_value, int maybe_last)
2141 rtx value;
2142 tree type;
2144 if (want_value == -1)
2145 want_value = expr_stmts_for_value != 0;
2147 /* If -Wextra, warn about statements with no side effects,
2148 except for an explicit cast to void (e.g. for assert()), and
2149 except for last statement in ({...}) where they may be useful. */
2150 if (! want_value
2151 && (expr_stmts_for_value == 0 || ! maybe_last)
2152 && exp != error_mark_node
2153 && warn_unused_value)
2155 if (TREE_SIDE_EFFECTS (exp))
2156 warn_if_unused_value (exp);
2157 else if (!VOID_TYPE_P (TREE_TYPE (exp)))
2158 warning ("%Hstatement with no effect", &emit_locus);
2161 /* If EXP is of function type and we are expanding statements for
2162 value, convert it to pointer-to-function. */
2163 if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2164 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2166 /* The call to `expand_expr' could cause last_expr_type and
2167 last_expr_value to get reset. Therefore, we set last_expr_value
2168 and last_expr_type *after* calling expand_expr. */
2169 value = expand_expr (exp, want_value ? NULL_RTX : const0_rtx,
2170 VOIDmode, 0);
2171 type = TREE_TYPE (exp);
2173 /* If all we do is reference a volatile value in memory,
2174 copy it to a register to be sure it is actually touched. */
2175 if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
2177 if (TYPE_MODE (type) == VOIDmode)
2179 else if (TYPE_MODE (type) != BLKmode)
2180 value = copy_to_reg (value);
2181 else
2183 rtx lab = gen_label_rtx ();
2185 /* Compare the value with itself to reference it. */
2186 emit_cmp_and_jump_insns (value, value, EQ,
2187 expand_expr (TYPE_SIZE (type),
2188 NULL_RTX, VOIDmode, 0),
2189 BLKmode, 0, lab);
2190 emit_label (lab);
2194 /* If this expression is part of a ({...}) and is in memory, we may have
2195 to preserve temporaries. */
2196 preserve_temp_slots (value);
2198 /* Free any temporaries used to evaluate this expression. Any temporary
2199 used as a result of this expression will already have been preserved
2200 above. */
2201 free_temp_slots ();
2203 if (want_value)
2205 last_expr_value = value;
2206 last_expr_type = type;
2209 emit_queue ();
2212 /* Warn if EXP contains any computations whose results are not used.
2213 Return 1 if a warning is printed; 0 otherwise. */
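/* Illustrative behavior (assumed examples): as a full statement,
   `x + 1;' draws the warning, while `f ();', `i++;', and
   `(void) (x + 1);' do not, per the cases below.  */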
2215 int
2216 warn_if_unused_value (tree exp)
2218 if (TREE_USED (exp))
2219 return 0;
2221 /* Don't warn about void constructs. This includes casting to void,
2222 void function calls, and statement expressions with a final cast
2223 to void. */
2224 if (VOID_TYPE_P (TREE_TYPE (exp)))
2225 return 0;
2227 switch (TREE_CODE (exp))
2229 case PREINCREMENT_EXPR:
2230 case POSTINCREMENT_EXPR:
2231 case PREDECREMENT_EXPR:
2232 case POSTDECREMENT_EXPR:
2233 case MODIFY_EXPR:
2234 case INIT_EXPR:
2235 case TARGET_EXPR:
2236 case CALL_EXPR:
2237 case RTL_EXPR:
2238 case TRY_CATCH_EXPR:
2239 case WITH_CLEANUP_EXPR:
2240 case EXIT_EXPR:
2241 return 0;
2243 case BIND_EXPR:
2244 /* For a binding, warn if no side effect within it. */
2245 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2247 case SAVE_EXPR:
2248 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2250 case TRUTH_ORIF_EXPR:
2251 case TRUTH_ANDIF_EXPR:
2252 /* In && or ||, warn if 2nd operand has no side effect. */
2253 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2255 case COMPOUND_EXPR:
2256 if (TREE_NO_UNUSED_WARNING (exp))
2257 return 0;
2258 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2259 return 1;
2260 /* Let people do `(foo (), 0)' without a warning. */
2261 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2262 return 0;
2263 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2265 case NOP_EXPR:
2266 case CONVERT_EXPR:
2267 case NON_LVALUE_EXPR:
2268 /* Don't warn about conversions not explicit in the user's program. */
2269 if (TREE_NO_UNUSED_WARNING (exp))
2270 return 0;
2271 /* Assignment to a cast usually results in a cast of a modify.
2272 Don't complain about that. There can be an arbitrary number of
2273 casts before the modify, so we must loop until we find the first
2274 non-cast expression and then test to see if that is a modify. */
2276 tree tem = TREE_OPERAND (exp, 0);
2278 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2279 tem = TREE_OPERAND (tem, 0);
2281 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2282 || TREE_CODE (tem) == CALL_EXPR)
2283 return 0;
2285 goto maybe_warn;
2287 case INDIRECT_REF:
2288 /* Don't warn about automatic dereferencing of references, since
2289 the user cannot control it. */
2290 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2291 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2292 /* Fall through. */
2294 default:
2295 /* Referencing a volatile value is a side effect, so don't warn. */
2296 if ((DECL_P (exp)
2297 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2298 && TREE_THIS_VOLATILE (exp))
2299 return 0;
2301 /* If this is an expression which has no operands, there is no value
2302 to be unused. There are no such language-independent codes,
2303 but front ends may define such. */
2304 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2305 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2306 return 0;
2308 maybe_warn:
2309 /* If this is an expression with side effects, don't warn. */
2310 if (TREE_SIDE_EFFECTS (exp))
2311 return 0;
2313 warning ("%Hvalue computed is not used", &emit_locus);
2314 return 1;
2318 /* Clear out the memory of the last expression evaluated. */
2320 void
2321 clear_last_expr (void)
2323 last_expr_type = NULL_TREE;
2324 last_expr_value = NULL_RTX;
2327 /* Begin a statement-expression, i.e., a series of statements which
2328 may return a value. Return the RTL_EXPR for this statement expr.
2329 The caller must save that value and pass it to
2330 expand_end_stmt_expr. If HAS_SCOPE is nonzero, temporaries created
2331 in the statement-expression are deallocated at the end of the
2332 expression. */
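/* Sketch of the expected call sequence (assumed example): for the GNU
   statement-expression

       y = ({ int t = f (); t + 1; });

   the front end calls expand_start_stmt_expr, expands the inner
   statements (the value of the last one being recorded via the
   last_expr_value machinery), and then calls expand_end_stmt_expr,
   whose returned RTL_EXPR carries the value of `t + 1'.  */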
2334 tree
2335 expand_start_stmt_expr (int has_scope)
2337 tree t;
2339 /* Make the RTL_EXPR node temporary, not momentary,
2340 so that rtl_expr_chain doesn't become garbage. */
2341 t = make_node (RTL_EXPR);
2342 do_pending_stack_adjust ();
2343 if (has_scope)
2344 start_sequence_for_rtl_expr (t);
2345 else
2346 start_sequence ();
2347 NO_DEFER_POP;
2348 expr_stmts_for_value++;
2349 return t;
2352 /* Restore the previous state at the end of a statement that returns a value.
2353 Returns a tree node representing the statement's value and the
2354 insns to compute the value.
2356 The nodes of that expression have been freed by now, so we cannot use them.
2357 But we don't want to do that anyway; the expression has already been
2358 evaluated and now we just want to use the value. So generate an RTL_EXPR
2359 with the proper type and RTL value.
2361 If the last substatement was not an expression,
2362 return something with type `void'. */
2364 tree
2365 expand_end_stmt_expr (tree t)
2367 OK_DEFER_POP;
2369 if (! last_expr_value || ! last_expr_type)
2371 last_expr_value = const0_rtx;
2372 last_expr_type = void_type_node;
2374 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2375 /* Remove any possible QUEUED. */
2376 last_expr_value = protect_from_queue (last_expr_value, 0);
2378 emit_queue ();
2380 TREE_TYPE (t) = last_expr_type;
2381 RTL_EXPR_RTL (t) = last_expr_value;
2382 RTL_EXPR_SEQUENCE (t) = get_insns ();
2384 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2386 end_sequence ();
2388 /* Don't consider deleting this expr or containing exprs at tree level. */
2389 TREE_SIDE_EFFECTS (t) = 1;
2390 /* Propagate volatility of the actual RTL expr. */
2391 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2393 clear_last_expr ();
2394 expr_stmts_for_value--;
2396 return t;
2399 /* Generate RTL for the start of an if-then. COND is the expression
2400 whose truth should be tested.
2402 If EXITFLAG is nonzero, this conditional is visible to
2403 `exit_something'. */
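/* Sketch of the expected pairing (assumed example): the statement

       if (a) s1; else if (b) s2; else s3;

   expands as

       expand_start_cond (a, 0);   ...s1...
       expand_start_elseif (b);    ...s2...
       expand_start_else ();       ...s3...
       expand_end_cond ();

   where next_label receives the jump when a test fails and
   endif_label marks the end of the whole construct.  */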
2405 void
2406 expand_start_cond (tree cond, int exitflag)
2408 struct nesting *thiscond = ALLOC_NESTING ();
2410 /* Make an entry on cond_stack for the cond we are entering. */
2412 thiscond->desc = COND_NESTING;
2413 thiscond->next = cond_stack;
2414 thiscond->all = nesting_stack;
2415 thiscond->depth = ++nesting_depth;
2416 thiscond->data.cond.next_label = gen_label_rtx ();
2417 /* Before we encounter an `else', we don't need a separate exit label
2418 unless there are supposed to be exit statements
2419 to exit this conditional. */
2420 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2421 thiscond->data.cond.endif_label = thiscond->exit_label;
2422 cond_stack = thiscond;
2423 nesting_stack = thiscond;
2425 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2428 /* Generate RTL between then-clause and the elseif-clause
2429 of an if-then-elseif-.... */
2431 void
2432 expand_start_elseif (tree cond)
2434 if (cond_stack->data.cond.endif_label == 0)
2435 cond_stack->data.cond.endif_label = gen_label_rtx ();
2436 emit_jump (cond_stack->data.cond.endif_label);
2437 emit_label (cond_stack->data.cond.next_label);
2438 cond_stack->data.cond.next_label = gen_label_rtx ();
2439 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2442 /* Generate RTL between the then-clause and the else-clause
2443 of an if-then-else. */
2445 void
2446 expand_start_else (void)
2448 if (cond_stack->data.cond.endif_label == 0)
2449 cond_stack->data.cond.endif_label = gen_label_rtx ();
2451 emit_jump (cond_stack->data.cond.endif_label);
2452 emit_label (cond_stack->data.cond.next_label);
2453 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2456 /* After calling expand_start_else, turn this "else" into an "else if"
2457 by providing another condition. */
2459 void
2460 expand_elseif (tree cond)
2462 cond_stack->data.cond.next_label = gen_label_rtx ();
2463 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2466 /* Generate RTL for the end of an if-then.
2467 Pop the record for it off of cond_stack. */
2469 void
2470 expand_end_cond (void)
2472 struct nesting *thiscond = cond_stack;
2474 do_pending_stack_adjust ();
2475 if (thiscond->data.cond.next_label)
2476 emit_label (thiscond->data.cond.next_label);
2477 if (thiscond->data.cond.endif_label)
2478 emit_label (thiscond->data.cond.endif_label);
2480 POPSTACK (cond_stack);
2481 clear_last_expr ();
2484 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2485 loop should be exited by `exit_something'. This is a loop for which
2486 `expand_continue' will jump to the top of the loop.
2488 Make an entry on loop_stack to record the labels associated with
2489 this loop. */
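/* Sketch of a typical pairing (assumed example): a C `while' loop

       while (test) body;

   can be expanded as

       expand_start_loop (1);
       expand_exit_loop_top_cond (0, test);
       ...body...
       expand_end_loop ();

   while a `for' loop with an increment instead uses
   expand_start_loop_continue_elsewhere and marks the increment code
   with expand_loop_continue_here.  */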
2491 struct nesting *
2492 expand_start_loop (int exit_flag)
2494 struct nesting *thisloop = ALLOC_NESTING ();
2496 /* Make an entry on loop_stack for the loop we are entering. */
2498 thisloop->desc = LOOP_NESTING;
2499 thisloop->next = loop_stack;
2500 thisloop->all = nesting_stack;
2501 thisloop->depth = ++nesting_depth;
2502 thisloop->data.loop.start_label = gen_label_rtx ();
2503 thisloop->data.loop.end_label = gen_label_rtx ();
2504 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2505 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2506 loop_stack = thisloop;
2507 nesting_stack = thisloop;
2509 do_pending_stack_adjust ();
2510 emit_queue ();
2511 emit_note (NOTE_INSN_LOOP_BEG);
2512 emit_label (thisloop->data.loop.start_label);
2514 return thisloop;
2517 /* Like expand_start_loop but for a loop where the continuation point
2518 (for expand_continue_loop) will be specified explicitly. */
2520 struct nesting *
2521 expand_start_loop_continue_elsewhere (int exit_flag)
2523 struct nesting *thisloop = expand_start_loop (exit_flag);
2524 loop_stack->data.loop.continue_label = gen_label_rtx ();
2525 return thisloop;
2528 /* Begin a null, aka do { } while (0) "loop". But since the contents
2529 of said loop can still contain a break, we must frob the loop nest. */
2531 struct nesting *
2532 expand_start_null_loop (void)
2534 struct nesting *thisloop = ALLOC_NESTING ();
2536 /* Make an entry on loop_stack for the loop we are entering. */
2538 thisloop->desc = LOOP_NESTING;
2539 thisloop->next = loop_stack;
2540 thisloop->all = nesting_stack;
2541 thisloop->depth = ++nesting_depth;
2542 thisloop->data.loop.start_label = emit_note (NOTE_INSN_DELETED);
2543 thisloop->data.loop.end_label = gen_label_rtx ();
2544 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2545 thisloop->exit_label = thisloop->data.loop.end_label;
2546 loop_stack = thisloop;
2547 nesting_stack = thisloop;
2549 return thisloop;
2552 /* Specify the continuation point for a loop started with
2553 expand_start_loop_continue_elsewhere.
2554 Use this at the point in the code to which a continue statement
2555 should jump. */
2557 void
2558 expand_loop_continue_here (void)
2560 do_pending_stack_adjust ();
2561 emit_note (NOTE_INSN_LOOP_CONT);
2562 emit_label (loop_stack->data.loop.continue_label);
2565 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2566 Pop the block off of loop_stack. */
2568 void
2569 expand_end_loop (void)
2571 rtx start_label = loop_stack->data.loop.start_label;
2572 rtx etc_note;
2573 int eh_regions, debug_blocks;
2574 bool empty_test;
2576 /* Mark the continue-point at the top of the loop if none elsewhere. */
2577 if (start_label == loop_stack->data.loop.continue_label)
2578 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2580 do_pending_stack_adjust ();
2582 /* If the loop starts with a loop exit, roll that to the end where
2583 it will optimize together with the jump back.
2585 If the loop presently looks like this (in pseudo-C):
2587 LOOP_BEG
2588 start_label:
2589 if (test) goto end_label;
2590 LOOP_END_TOP_COND
2591 body;
2592 goto start_label;
2593 end_label:
2595 transform it to look like:
2597 LOOP_BEG
2598 goto start_label;
2599 top_label:
2600 body;
2601 start_label:
2602 if (test) goto end_label;
2603 goto top_label;
2604 end_label:
2606 We rely on the presence of NOTE_INSN_LOOP_END_TOP_COND to mark
2607 the end of the entry conditional. Without this, our lexical scan
2608 can't tell the difference between an entry conditional and a
2609 body conditional that exits the loop. Mistaking the two means
2610 that we can misplace the NOTE_INSN_LOOP_CONT note, which can
2611 screw up loop unrolling.
2613 Things will be oh so much better when loop optimization is done
2614 off of a proper control flow graph... */
2616 /* Scan insns from the top of the loop looking for the END_TOP_COND note. */
2618 empty_test = true;
2619 eh_regions = debug_blocks = 0;
2620 for (etc_note = start_label; etc_note ; etc_note = NEXT_INSN (etc_note))
2621 if (GET_CODE (etc_note) == NOTE)
2623 if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_END_TOP_COND)
2624 break;
2626 /* We must not walk into a nested loop. */
2627 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_BEG)
2629 etc_note = NULL_RTX;
2630 break;
2633 /* At the same time, scan for EH region notes, as we don't want
2634 to scrog region nesting. This shouldn't happen, but... */
2635 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_BEG)
2636 eh_regions++;
2637 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_END)
2639 if (--eh_regions < 0)
2640 /* We've come to the end of an EH region, but never saw the
2641 beginning of that region. That means that an EH region
2642 begins before the top of the loop, and ends in the middle
2643 of it. The existence of such a situation violates a basic
2644 assumption in this code, since that would imply that even
2645 when EH_REGIONS is zero, we might move code out of an
2646 exception region. */
2647 abort ();
2650 /* Likewise for debug scopes. In this case we'll either (1) move
2651 all of the notes if they are properly nested or (2) leave the
2652 notes alone and only rotate the loop at high optimization
2653 levels when we expect to scrog debug info. */
2654 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_BEG)
2655 debug_blocks++;
2656 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_END)
2657 debug_blocks--;
2659 else if (INSN_P (etc_note))
2660 empty_test = false;
2662 if (etc_note
2663 && optimize
2664 && ! empty_test
2665 && eh_regions == 0
2666 && (debug_blocks == 0 || optimize >= 2)
2667 && NEXT_INSN (etc_note) != NULL_RTX
2668 && ! any_condjump_p (get_last_insn ()))
2670 /* We found one. Move everything from START to ETC to the end
2671 of the loop, and add a jump from the top of the loop. */
2672 rtx top_label = gen_label_rtx ();
2673 rtx start_move = start_label;
2675 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2676 then we want to move this note also. */
2677 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2678 && NOTE_LINE_NUMBER (PREV_INSN (start_move)) == NOTE_INSN_LOOP_CONT)
2679 start_move = PREV_INSN (start_move);
2681 emit_label_before (top_label, start_move);
2683 /* Actually move the insns. If the debug scopes are nested, we
2684 can move everything at once. Otherwise we have to move them
2685 one by one and squeeze out the block notes. */
2686 if (debug_blocks == 0)
2687 reorder_insns (start_move, etc_note, get_last_insn ());
2688 else
2690 rtx insn, next_insn;
2691 for (insn = start_move; insn; insn = next_insn)
2693 /* Figure out which insn comes after this one. We have
2694 to do this before we move INSN. */
2695 next_insn = (insn == etc_note ? NULL : NEXT_INSN (insn));
2697 if (GET_CODE (insn) == NOTE
2698 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2699 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2700 continue;
2702 reorder_insns (insn, insn, get_last_insn ());
2706 /* Add the jump from the top of the loop. */
2707 emit_jump_insn_before (gen_jump (start_label), top_label);
2708 emit_barrier_before (top_label);
2709 start_label = top_label;
2712 emit_jump (start_label);
2713 emit_note (NOTE_INSN_LOOP_END);
2714 emit_label (loop_stack->data.loop.end_label);
2716 POPSTACK (loop_stack);
2718 clear_last_expr ();
2721 /* Finish a null loop, aka do { } while (0). */
2723 void
2724 expand_end_null_loop (void)
2726 do_pending_stack_adjust ();
2727 emit_label (loop_stack->data.loop.end_label);
2729 POPSTACK (loop_stack);
2731 clear_last_expr ();
2734 /* Generate a jump to the current loop's continue-point.
2735 This is usually the top of the loop, but may be specified
2736 explicitly elsewhere. If not currently inside a loop,
2737 return 0 and do nothing; caller will print an error message. */
2739 int
2740 expand_continue_loop (struct nesting *whichloop)
2742 /* Emit information for branch prediction. */
2743 rtx note;
2745 if (flag_guess_branch_prob)
2747 note = emit_note (NOTE_INSN_PREDICTION);
2748 NOTE_PREDICTION (note) = NOTE_PREDICT (PRED_CONTINUE, IS_TAKEN);
2750 clear_last_expr ();
2751 if (whichloop == 0)
2752 whichloop = loop_stack;
2753 if (whichloop == 0)
2754 return 0;
2755 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2756 NULL_RTX);
2757 return 1;
2760 /* Generate a jump to exit the current loop. If not currently inside a loop,
2761 return 0 and do nothing; caller will print an error message. */
2763 int
2764 expand_exit_loop (struct nesting *whichloop)
2766 clear_last_expr ();
2767 if (whichloop == 0)
2768 whichloop = loop_stack;
2769 if (whichloop == 0)
2770 return 0;
2771 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2772 return 1;
2775 /* Generate a conditional jump to exit the current loop if COND
2776 evaluates to zero. If not currently inside a loop,
2777 return 0 and do nothing; caller will print an error message. */
2779 int
2780 expand_exit_loop_if_false (struct nesting *whichloop, tree cond)
2782 rtx label;
2783 clear_last_expr ();
2785 if (whichloop == 0)
2786 whichloop = loop_stack;
2787 if (whichloop == 0)
2788 return 0;
2790 if (integer_nonzerop (cond))
2791 return 1;
2792 if (integer_zerop (cond))
2793 return expand_exit_loop (whichloop);
2795 /* Check if we definitely won't need a fixup. */
2796 if (whichloop == nesting_stack)
2798 jumpifnot (cond, whichloop->data.loop.end_label);
2799 return 1;
2802 /* In order to handle fixups, we actually create a conditional jump
2803 around an unconditional branch to exit the loop. If fixups are
2804 necessary, they go before the unconditional branch. */
2806 label = gen_label_rtx ();
2807 jumpif (cond, label);
2808 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2809 NULL_RTX);
2810 emit_label (label);
2812 return 1;
2815 /* Like expand_exit_loop_if_false except also emit a note marking
2816 the end of the conditional. Should only be used immediately
2817 after expand_start_loop. */
2819 int
2820 expand_exit_loop_top_cond (struct nesting *whichloop, tree cond)
2822 if (! expand_exit_loop_if_false (whichloop, cond))
2823 return 0;
2825 emit_note (NOTE_INSN_LOOP_END_TOP_COND);
2826 return 1;
2829 /* Return nonzero if we should preserve sub-expressions as separate
2830 pseudos. We never do so if we aren't optimizing. We always do so
2831 if -fexpensive-optimizations.
2833 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2834 the loop may still be a small one. */
2836 int
2837 preserve_subexpressions_p (void)
2839 rtx insn;
2841 if (flag_expensive_optimizations)
2842 return 1;
2844 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2845 return 0;
2847 insn = get_last_insn_anywhere ();
2849 return (insn
2850 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2851 < n_non_fixed_regs * 3));
2855 /* Generate a jump to exit the current loop, conditional, binding contour
2856 or case statement. Not all such constructs are visible to this function,
2857 only those started with EXIT_FLAG nonzero. Individual languages use
2858 the EXIT_FLAG parameter to control which kinds of constructs you can
2859 exit this way.
2861 If not currently inside anything that can be exited,
2862 return 0 and do nothing; caller will print an error message. */
2864 int
2865 expand_exit_something (void)
2867 struct nesting *n;
2868 clear_last_expr ();
2869 for (n = nesting_stack; n; n = n->all)
2870 if (n->exit_label != 0)
2872 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2873 return 1;
2876 return 0;
2879 /* Generate RTL to return from the current function, with no value.
2880 (That is, we do not do anything about returning any value.) */
2882 void
2883 expand_null_return (void)
2885 rtx last_insn;
2887 last_insn = get_last_insn ();
2889 /* If this function was declared to return a value, but we
2890 didn't, clobber the return registers so that they are not
2891 propagated live to the rest of the function. */
2892 clobber_return_register ();
2894 expand_null_return_1 (last_insn);
2897 /* Generate RTL to return directly from the current function.
2898 (That is, we bypass any return value.) */
2900 void
2901 expand_naked_return (void)
2903 rtx last_insn, end_label;
2905 last_insn = get_last_insn ();
2906 end_label = naked_return_label;
2908 clear_pending_stack_adjust ();
2909 do_pending_stack_adjust ();
2910 clear_last_expr ();
2912 if (end_label == 0)
2913 end_label = naked_return_label = gen_label_rtx ();
2914 expand_goto_internal (NULL_TREE, end_label, last_insn);
2917 /* Try to guess whether the returned value indicates an error code. */
2918 static enum br_predictor
2919 return_prediction (rtx val)
2921 /* Different heuristics for pointers and scalars. */
2922 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2924 /* NULL is usually not returned. */
2925 if (val == const0_rtx)
2926 return PRED_NULL_RETURN;
2928 else
2930 /* Negative return values are often used to indicate
2931 errors. */
2932 if (GET_CODE (val) == CONST_INT
2933 && INTVAL (val) < 0)
2934 return PRED_NEGATIVE_RETURN;
2935 /* Constant return values are also usually errors,
2936 zero/one often mean booleans so exclude them from the
2937 heuristics. */
2938 if (CONSTANT_P (val)
2939 && (val != const0_rtx && val != const1_rtx))
2940 return PRED_CONST_RETURN;
2942 return PRED_NO_PREDICTION;
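/* Illustrative predictions (assumed examples, once the returned
   expressions have folded to RTL constants):

       return NULL;    ->  PRED_NULL_RETURN
       return -1;      ->  PRED_NEGATIVE_RETURN
       return 42;      ->  PRED_CONST_RETURN
       return 0;       ->  no prediction for non-pointers, since
                           zero and one often mean booleans  */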
2946 /* If the current function returns values in the most significant part
2947 of a register, shift return value VAL appropriately. The mode of
2948 the function's return type is known not to be BLKmode. */
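/* Worked example (assumed target numbers): returning a 1-byte struct
   in the most significant part of a 32-bit register gives
   shift = 32 - 8 * 1 = 24, so VAL is shifted left 24 bits before the
   return.  */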
2950 static rtx
2951 shift_return_value (rtx val)
2953 tree type;
2955 type = TREE_TYPE (DECL_RESULT (current_function_decl));
2956 if (targetm.calls.return_in_msb (type))
2958 rtx target;
2959 HOST_WIDE_INT shift;
2961 target = DECL_RTL (DECL_RESULT (current_function_decl));
2962 shift = (GET_MODE_BITSIZE (GET_MODE (target))
2963 - BITS_PER_UNIT * int_size_in_bytes (type));
2964 if (shift > 0)
2965 val = expand_binop (GET_MODE (target), ashl_optab,
2966 gen_lowpart (GET_MODE (target), val),
2967 GEN_INT (shift), target, 1, OPTAB_WIDEN);
2969 return val;
2973 /* Generate RTL to return from the current function, with value VAL. */
2975 static void
2976 expand_value_return (rtx val)
2978 rtx last_insn;
2979 rtx return_reg;
2980 enum br_predictor pred;
2982 if (flag_guess_branch_prob
2983 && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
2985 /* Emit information for branch prediction. */
2986 rtx note;
2988 note = emit_note (NOTE_INSN_PREDICTION);
2990 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
2994 last_insn = get_last_insn ();
2995 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2997 /* Copy the value to the return location
2998 unless it's already there. */
3000 if (return_reg != val)
3002 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
3003 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
3005 int unsignedp = TREE_UNSIGNED (type);
3006 enum machine_mode old_mode
3007 = DECL_MODE (DECL_RESULT (current_function_decl));
3008 enum machine_mode mode
3009 = promote_mode (type, old_mode, &unsignedp, 1);
3011 if (mode != old_mode)
3012 val = convert_modes (mode, old_mode, val, unsignedp);
3014 if (GET_CODE (return_reg) == PARALLEL)
3015 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3016 else
3017 emit_move_insn (return_reg, val);
3020 expand_null_return_1 (last_insn);
3023 /* Output a return with no value. If LAST_INSN is nonzero,
3024 pretend that the return takes place after LAST_INSN. */
3026 static void
3027 expand_null_return_1 (rtx last_insn)
3029 rtx end_label = cleanup_label ? cleanup_label : return_label;
3031 clear_pending_stack_adjust ();
3032 do_pending_stack_adjust ();
3033 clear_last_expr ();
3035 if (end_label == 0)
3036 end_label = return_label = gen_label_rtx ();
3037 expand_goto_internal (NULL_TREE, end_label, last_insn);
3040 /* Generate RTL to evaluate the expression RETVAL and return it
3041 from the current function. */
3043 void
3044 expand_return (tree retval)
3046 /* If there are any cleanups to be performed, then they will
3047 be inserted following LAST_INSN. It is desirable
3048 that the last_insn, for such purposes, should be the
3049 last insn before computing the return value. Otherwise, cleanups
3050 which call functions can clobber the return value. */
3051 /* ??? rms: I think that is erroneous, because in C++ it would
3052 run destructors on variables that might be used in the subsequent
3053 computation of the return value. */
3054 rtx last_insn = 0;
3055 rtx result_rtl;
3056 rtx val = 0;
3057 tree retval_rhs;
3059 /* If function wants no value, give it none. */
3060 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3062 expand_expr (retval, NULL_RTX, VOIDmode, 0);
3063 emit_queue ();
3064 expand_null_return ();
3065 return;
3068 if (retval == error_mark_node)
3070 /* Treat this like a return of no value from a function that
3071 returns a value. */
3072 expand_null_return ();
3073 return;
3075 else if (TREE_CODE (retval) == RESULT_DECL)
3076 retval_rhs = retval;
3077 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
3078 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3079 retval_rhs = TREE_OPERAND (retval, 1);
3080 else if (VOID_TYPE_P (TREE_TYPE (retval)))
3081 /* Recognize tail-recursive call to void function. */
3082 retval_rhs = retval;
3083 else
3084 retval_rhs = NULL_TREE;
3086 last_insn = get_last_insn ();
3088 /* Distribute return down conditional expr if either of the sides
3089 may involve tail recursion (see test below). This increases the number
3090 of tail recursions we see. Don't do this always since it can produce
3091 sub-optimal code in some cases and we distribute assignments into
3092 conditional expressions when it would help. */
3094 if (optimize && retval_rhs != 0
3095 && frame_offset == 0
3096 && TREE_CODE (retval_rhs) == COND_EXPR
3097 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
3098 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
3100 rtx label = gen_label_rtx ();
3101 tree expr;
3103 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
3104 start_cleanup_deferral ();
3105 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3106 DECL_RESULT (current_function_decl),
3107 TREE_OPERAND (retval_rhs, 1));
3108 TREE_SIDE_EFFECTS (expr) = 1;
3109 expand_return (expr);
3110 emit_label (label);
3112 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3113 DECL_RESULT (current_function_decl),
3114 TREE_OPERAND (retval_rhs, 2));
3115 TREE_SIDE_EFFECTS (expr) = 1;
3116 expand_return (expr);
3117 end_cleanup_deferral ();
3118 return;
3121 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3123 /* If the result is an aggregate that is being returned in one (or more)
3124 registers, load the registers here. The compiler currently can't handle
3125 copying a BLKmode value into registers. We could put this code in a
3126 more general area (for use by everyone instead of just function
3127 call/return), but until this feature is generally usable it is kept here
3128 (and in expand_call). The value must go into a pseudo in case there
3129 are cleanups that will clobber the real return register. */
3131 if (retval_rhs != 0
3132 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3133 && GET_CODE (result_rtl) == REG)
3135 int i;
3136 unsigned HOST_WIDE_INT bitpos, xbitpos;
3137 unsigned HOST_WIDE_INT padding_correction = 0;
3138 unsigned HOST_WIDE_INT bytes
3139 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3140 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3141 unsigned int bitsize
3142 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3143 rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
3144 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3145 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3146 enum machine_mode tmpmode, result_reg_mode;
3148 if (bytes == 0)
3150 expand_null_return ();
3151 return;
3154 /* If the structure doesn't take up a whole number of words, see
3155 whether the register value should be padded on the left or on
3156 the right. Set PADDING_CORRECTION to the number of padding
3157 bits needed on the left side.
3159 In most ABIs, the structure will be returned at the least significant end of
3160 the register, which translates to right padding on little-endian
3161 targets and left padding on big-endian targets. The opposite
3162 holds if the structure is returned at the most significant
3163 end of the register. */
3164 if (bytes % UNITS_PER_WORD != 0
3165 && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
3166 ? !BYTES_BIG_ENDIAN
3167 : BYTES_BIG_ENDIAN))
3168 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3169 * BITS_PER_UNIT));
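/* Worked example (assumed 32-bit big-endian target returning at the
   least significant end): for a 6-byte structure the partial word has
   bytes % UNITS_PER_WORD == 2, giving
   padding_correction = 32 - 2 * 8 = 16 padding bits on the left.  */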
3171 /* Copy the structure BITSIZE bits at a time. */
3172 for (bitpos = 0, xbitpos = padding_correction;
3173 bitpos < bytes * BITS_PER_UNIT;
3174 bitpos += bitsize, xbitpos += bitsize)
3176 /* We need a new destination pseudo each time xbitpos is
3177 on a word boundary and when xbitpos == padding_correction
3178 (the first time through). */
3179 if (xbitpos % BITS_PER_WORD == 0
3180 || xbitpos == padding_correction)
3182 /* Generate an appropriate register. */
3183 dst = gen_reg_rtx (word_mode);
3184 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3186 /* Clear the destination before we move anything into it. */
3187 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
3190 /* We need a new source operand each time bitpos is on a word
3191 boundary. */
3192 if (bitpos % BITS_PER_WORD == 0)
3193 src = operand_subword_force (result_val,
3194 bitpos / BITS_PER_WORD,
3195 BLKmode);
3197 /* Use bitpos for the source extraction (left justified) and
3198 xbitpos for the destination store (right justified). */
3199 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3200 extract_bit_field (src, bitsize,
3201 bitpos % BITS_PER_WORD, 1,
3202 NULL_RTX, word_mode, word_mode,
3203 BITS_PER_WORD),
3204 BITS_PER_WORD);
3207 tmpmode = GET_MODE (result_rtl);
3208 if (tmpmode == BLKmode)
3210 /* Find the smallest integer mode large enough to hold the
3211 entire structure and use that mode instead of BLKmode
3212 on the USE insn for the return register. */
3213 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3214 tmpmode != VOIDmode;
3215 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3216 /* Have we found a large enough mode? */
3217 if (GET_MODE_SIZE (tmpmode) >= bytes)
3218 break;
3220 /* No suitable mode found. */
3221 if (tmpmode == VOIDmode)
3222 abort ();
3224 PUT_MODE (result_rtl, tmpmode);
3227 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3228 result_reg_mode = word_mode;
3229 else
3230 result_reg_mode = tmpmode;
3231 result_reg = gen_reg_rtx (result_reg_mode);
3233 emit_queue ();
3234 for (i = 0; i < n_regs; i++)
3235 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3236 result_pseudos[i]);
3238 if (tmpmode != result_reg_mode)
3239 result_reg = gen_lowpart (tmpmode, result_reg);
3241 expand_value_return (result_reg);
3243 else if (retval_rhs != 0
3244 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3245 && (GET_CODE (result_rtl) == REG
3246 || (GET_CODE (result_rtl) == PARALLEL)))
3248 /* Calculate the return value into a temporary (usually a pseudo
3249 reg). */
3250 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3251 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3253 val = assign_temp (nt, 0, 0, 1);
3254 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3255 val = force_not_mem (val);
3256 emit_queue ();
3257 /* Return the calculated value, doing cleanups first. */
3258 expand_value_return (shift_return_value (val));
3260 else
3262 /* No cleanups or no hard reg used;
3263 calculate value into hard return reg. */
3264 expand_expr (retval, const0_rtx, VOIDmode, 0);
3265 emit_queue ();
3266 expand_value_return (result_rtl);
3270 /* Attempt to optimize a potential tail recursion call into a goto.
3271 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3272 where to place the jump to the tail recursion label.
3274 Return TRUE if the call was optimized into a goto. */
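/* Sketch of the transformation (assumed example): for

       int sum (int n, int acc)
       {
         if (n == 0) return acc;
         return sum (n - 1, acc + n);
       }

   the tail call can become, conceptually,

       t1 = n - 1; t2 = acc + n;
       n = t1; acc = t2;
       goto tail_recursion_label;

   with all actuals computed before any formal is overwritten;
   tail_recursion_args below performs those stores safely.  */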
3276 int
3277 optimize_tail_recursion (tree arguments, rtx last_insn)
3279 /* Finish checking validity, and if valid emit code to set the
3280 argument variables for the new call. */
3281 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3283 if (tail_recursion_label == 0)
3285 tail_recursion_label = gen_label_rtx ();
3286 emit_label_after (tail_recursion_label,
3287 tail_recursion_reentry);
3289 emit_queue ();
3290 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3291 emit_barrier ();
3292 return 1;
3294 return 0;
3297 /* Emit code to alter this function's formal parms for a tail-recursive call.
3298 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3299 FORMALS is the chain of decls of formals.
3300 Return 1 if this can be done;
3301 otherwise return 0 and do not emit any code. */
3303 static int
3304 tail_recursion_args (tree actuals, tree formals)
3306 tree a = actuals, f = formals;
3307 int i;
3308 rtx *argvec;
3310 /* Check that number and types of actuals are compatible
3311 with the formals. This is not always true in valid C code.
3312 Also check that no formal needs to be addressable
3313 and that all formals are scalars. */
3315 /* Also count the args. */
3317 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3319 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3320 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3321 return 0;
3322 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3323 return 0;
3325 if (a != 0 || f != 0)
3326 return 0;
3328 /* Compute all the actuals. */
3330 argvec = alloca (i * sizeof (rtx));
3332 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3333 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3335 /* Find which actual values refer to current values of previous formals.
3336 Copy each of them now, before any formal is changed. */
3338 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3340 int copy = 0;
3341 int j;
3342 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3343 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3345 copy = 1;
3346 break;
3348 if (copy)
3349 argvec[i] = copy_to_reg (argvec[i]);
3352 /* Store the values of the actuals into the formals. */
3354 for (f = formals, a = actuals, i = 0; f;
3355 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3357 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3358 emit_move_insn (DECL_RTL (f), argvec[i]);
3359 else
3361 rtx tmp = argvec[i];
3362 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a)));
3363 promote_mode(TREE_TYPE (TREE_VALUE (a)), GET_MODE (tmp),
3364 &unsignedp, 0);
3365 if (DECL_MODE (f) != GET_MODE (DECL_RTL (f)))
3367 tmp = gen_reg_rtx (DECL_MODE (f));
3368 convert_move (tmp, argvec[i], unsignedp);
3370 convert_move (DECL_RTL (f), tmp, unsignedp);
3374 free_temp_slots ();
3375 return 1;
3378 /* Generate the RTL code for entering a binding contour.
3379 The variables are declared one by one, by calls to `expand_decl'.
3381 FLAGS is a bitwise or of the following flags:
3383 1 - Nonzero if this construct should be visible to
3384 `exit_something'.
3386 2 - Nonzero if this contour does not require a
3387 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3388 language-independent code should set this flag because they
3389 will not create corresponding BLOCK nodes. (There should be
3390 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3391 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3392 when expand_end_bindings is called.
3394 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3395 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3396 note. */
3398 void
3399 expand_start_bindings_and_block (int flags, tree block)
3401 struct nesting *thisblock = ALLOC_NESTING ();
3402 rtx note;
3403 int exit_flag = ((flags & 1) != 0);
3404 int block_flag = ((flags & 2) == 0);
3406 /* If a BLOCK is supplied, then the caller should be requesting a
3407 NOTE_INSN_BLOCK_BEG note. */
3408 if (!block_flag && block)
3409 abort ();
3411 /* Create a note to mark the beginning of the block. */
3412 if (block_flag)
3414 note = emit_note (NOTE_INSN_BLOCK_BEG);
3415 NOTE_BLOCK (note) = block;
3417 else
3418 note = emit_note (NOTE_INSN_DELETED);
3420 /* Make an entry on block_stack for the block we are entering. */
3422 thisblock->desc = BLOCK_NESTING;
3423 thisblock->next = block_stack;
3424 thisblock->all = nesting_stack;
3425 thisblock->depth = ++nesting_depth;
3426 thisblock->data.block.stack_level = 0;
3427 thisblock->data.block.cleanups = 0;
3428 thisblock->data.block.exception_region = 0;
3429 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3431 thisblock->data.block.conditional_code = 0;
3432 thisblock->data.block.last_unconditional_cleanup = note;
3433 /* When we insert instructions after the last unconditional cleanup,
3434 we don't adjust last_insn. That means that a later add_insn will
3435 clobber the instructions we've just added. The easiest way to
3436 fix this is to just insert another instruction here, so that the
3437 instructions inserted after the last unconditional cleanup are
3438 never the last instruction. */
3439 emit_note (NOTE_INSN_DELETED);
3441 if (block_stack
3442 && !(block_stack->data.block.cleanups == NULL_TREE
3443 && block_stack->data.block.outer_cleanups == NULL_TREE))
3444 thisblock->data.block.outer_cleanups
3445 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3446 block_stack->data.block.outer_cleanups);
3447 else
3448 thisblock->data.block.outer_cleanups = 0;
3449 thisblock->data.block.label_chain = 0;
3450 thisblock->data.block.innermost_stack_block = stack_block_stack;
3451 thisblock->data.block.first_insn = note;
3452 thisblock->data.block.block_start_count = ++current_block_start_count;
3453 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3454 block_stack = thisblock;
3455 nesting_stack = thisblock;
3457 /* Make a new level for allocating stack slots. */
3458 push_temp_slots ();
3461 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3462 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3463 expand_expr are made. After we end the region, we know that all
3464 space for all temporaries that were created by TARGET_EXPRs will be
3465 destroyed and their space freed for reuse. */
3467 void
3468 expand_start_target_temps (void)
3470 /* This is so that even if the result is preserved, the space
3471 allocated will be freed, as we know that it is no longer in use. */
3472 push_temp_slots ();
3474 /* Start a new binding layer that will keep track of all cleanup
3475 actions to be performed. */
3476 expand_start_bindings (2);
3478 target_temp_slot_level = temp_slot_level;
3481 void
3482 expand_end_target_temps (void)
3484 expand_end_bindings (NULL_TREE, 0, 0);
3486 /* This is so that even if the result is preserved, the space
3487 allocated will be freed, as we know that it is no longer in use. */
3488 pop_temp_slots ();
3491 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
3492 in question represents the outermost pair of curly braces (i.e. the "body
3493 block") of a function or method.
3495 For any BLOCK node representing a "body block" of a function or method, the
3496 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3497 represents the outermost (function) scope for the function or method (i.e.
3498 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3499 *that* node in turn will point to the relevant FUNCTION_DECL node. */
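/* Sketch of the chain being tested (assumed example): for

       int f (int x) { int y; ... }

   the body block declaring Y has a BLOCK_SUPERCONTEXT pointing at the
   outermost scope BLOCK (the one covering the parameter X), whose own
   BLOCK_SUPERCONTEXT is the FUNCTION_DECL for f; is_body_block
   returns 1 only for that inner block.  */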
3501 int
3502 is_body_block (tree stmt)
3504 if (lang_hooks.no_body_blocks)
3505 return 0;
3507 if (TREE_CODE (stmt) == BLOCK)
3509 tree parent = BLOCK_SUPERCONTEXT (stmt);
3511 if (parent && TREE_CODE (parent) == BLOCK)
3513 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3515 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3516 return 1;
3520 return 0;
3523 /* True if we are currently emitting insns in an area of output code
3524 that is controlled by a conditional expression. This is used by
3525 the cleanup handling code to generate conditional cleanup actions. */
3527 int
3528 conditional_context (void)
3530 return block_stack && block_stack->data.block.conditional_code;
3533 /* Return an opaque pointer to the current nesting level, so frontend code
3534 can check its own sanity. */
3536 struct nesting *
3537 current_nesting_level (void)
3539 return cfun ? block_stack : 0;
3542 /* Emit a handler label for a nonlocal goto handler.
3543 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3545 static rtx
3546 expand_nl_handler_label (rtx slot, rtx before_insn)
3548 rtx insns;
3549 rtx handler_label = gen_label_rtx ();
3551 /* Don't let cleanup_cfg delete the handler. */
3552 LABEL_PRESERVE_P (handler_label) = 1;
3554 start_sequence ();
3555 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3556 insns = get_insns ();
3557 end_sequence ();
3558 emit_insn_before (insns, before_insn);
3560 emit_label (handler_label);
3562 return handler_label;
3565 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3566 handler. */
3567 static void
3568 expand_nl_goto_receiver (void)
3570 /* Clobber the FP when we get here, so we have to make sure it's
3571 marked as used by this function. */
3572 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
3574 /* Mark the static chain as clobbered here so life information
3575 doesn't get messed up for it. */
3576 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
3578 #ifdef HAVE_nonlocal_goto
3579 if (! HAVE_nonlocal_goto)
3580 #endif
3581 /* First adjust our frame pointer to its actual value. It was
3582 previously set to the start of the virtual area corresponding to
3583 the stacked variables when we branched here and now needs to be
3584 adjusted to the actual hardware fp value.
3586 Assignments to virtual registers are converted by
3587 instantiate_virtual_regs into the corresponding assignment
3588 to the underlying register (fp in this case) that makes
3589 the original assignment true.
3590 So the following insn will actually be
3591 decrementing fp by STARTING_FRAME_OFFSET. */
3592 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3594 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3595 if (fixed_regs[ARG_POINTER_REGNUM])
3597 #ifdef ELIMINABLE_REGS
3598 /* If the argument pointer can be eliminated in favor of the
3599 frame pointer, we don't need to restore it. We assume here
3600 that if such an elimination is present, it can always be used.
3601 This is the case on all known machines; if we don't make this
3602 assumption, we do unnecessary saving on many machines. */
3603 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3604 size_t i;
3606 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3607 if (elim_regs[i].from == ARG_POINTER_REGNUM
3608 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3609 break;
3611 if (i == ARRAY_SIZE (elim_regs))
3612 #endif
3614 /* Now restore our arg pointer from the address at which it
3615 was saved in our stack frame. */
3616 emit_move_insn (virtual_incoming_args_rtx,
3617 copy_to_reg (get_arg_pointer_save_area (cfun)));
3620 #endif
3622 #ifdef HAVE_nonlocal_goto_receiver
3623 if (HAVE_nonlocal_goto_receiver)
3624 emit_insn (gen_nonlocal_goto_receiver ());
3625 #endif
3627 /* @@@ This is a kludge. Not all machine descriptions define a blockage
3628 insn, but we must not allow the code we just generated to be reordered
3629 by scheduling. Specifically, the update of the frame pointer must
3630 happen immediately, not later. So emit an ASM_INPUT to act as a blockage
3631 insn. */
3632 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
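/* Schematically (an illustrative sketch, not the exact insns): since
virtual_stack_vars_rtx stands for fp + STARTING_FRAME_OFFSET,
instantiate_virtual_regs turns the move above,

(set virtual_stack_vars_rtx hard_frame_pointer_rtx)

into roughly

(set fp (plus fp (const_int -STARTING_FRAME_OFFSET)))

which is the decrement described in the comment. */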
3635 /* Make handlers for nonlocal gotos taking place in the function calls in
3636 block THISBLOCK. */
3638 static void
3639 expand_nl_goto_receivers (struct nesting *thisblock)
3641 tree link;
3642 rtx afterward = gen_label_rtx ();
3643 rtx insns, slot;
3644 rtx label_list;
3645 int any_invalid;
3647 /* Record the handler address in the stack slot for that purpose,
3648 during this block, saving and restoring the outer value. */
3649 if (thisblock->next != 0)
3650 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3652 rtx save_receiver = gen_reg_rtx (Pmode);
3653 emit_move_insn (XEXP (slot, 0), save_receiver);
3655 start_sequence ();
3656 emit_move_insn (save_receiver, XEXP (slot, 0));
3657 insns = get_insns ();
3658 end_sequence ();
3659 emit_insn_before (insns, thisblock->data.block.first_insn);
3662 /* Jump around the handlers; they run only when specially invoked. */
3663 emit_jump (afterward);
3665 /* Make a separate handler for each label. */
3666 link = nonlocal_labels;
3667 slot = nonlocal_goto_handler_slots;
3668 label_list = NULL_RTX;
3669 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3670 /* Skip any labels we shouldn't be able to jump to from here;
3671 we generate one special handler for all of them below, which just
3672 traps. */
3673 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3675 rtx lab;
3676 lab = expand_nl_handler_label (XEXP (slot, 0),
3677 thisblock->data.block.first_insn);
3678 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3680 expand_nl_goto_receiver ();
3682 /* Jump to the "real" nonlocal label. */
3683 expand_goto (TREE_VALUE (link));
3686 /* A second pass over all nonlocal labels; this time we handle those
3687 we should not be able to jump to at this point. */
3688 link = nonlocal_labels;
3689 slot = nonlocal_goto_handler_slots;
3690 any_invalid = 0;
3691 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3692 if (DECL_TOO_LATE (TREE_VALUE (link)))
3694 rtx lab;
3695 lab = expand_nl_handler_label (XEXP (slot, 0),
3696 thisblock->data.block.first_insn);
3697 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3698 any_invalid = 1;
3701 if (any_invalid)
3703 expand_nl_goto_receiver ();
3704 expand_builtin_trap ();
3707 nonlocal_goto_handler_labels = label_list;
3708 emit_label (afterward);
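/* Schematically, for a block with two reachable nonlocal labels L1
and L2, the code emitted above looks like (illustrative only):

goto afterward;
handler1: <- address stored in L1's handler slot
restore fp, arg pointer, ...
goto L1;
handler2: <- address stored in L2's handler slot
restore fp, arg pointer, ...
goto L2;
afterward:

plus, if any label was marked DECL_TOO_LATE, one shared handler
that restores registers and then traps. */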
3711 /* Warn about any unused VARS (which may contain nodes other than
3712 VAR_DECLs, but such nodes are ignored). The nodes are connected
3713 via the TREE_CHAIN field. */
3715 void
3716 warn_about_unused_variables (tree vars)
3718 tree decl;
3720 if (warn_unused_variable)
3721 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3722 if (TREE_CODE (decl) == VAR_DECL
3723 && ! TREE_USED (decl)
3724 && ! DECL_IN_SYSTEM_HEADER (decl)
3725 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3726 warning ("%Junused variable '%D'", decl, decl);
3729 /* Generate RTL code to terminate a binding contour.
3731 VARS is the chain of VAR_DECL nodes for the variables bound in this
3732 contour. There may actually be other nodes in this chain, but any
3733 nodes other than VAR_DECLS are ignored.
3735 MARK_ENDS is nonzero if we should put a note at the beginning
3736 and end of this binding contour.
3738 DONT_JUMP_IN is positive if it is not valid to jump into this contour,
3739 zero if we can jump into this contour only if it does not have a saved
3740 stack level, and negative if we are not to check for invalid use of
3741 labels (because the front end does that). */
3743 void
3744 expand_end_bindings (tree vars, int mark_ends, int dont_jump_in)
3746 struct nesting *thisblock = block_stack;
3748 /* If any of the variables in this scope were not used, warn the
3749 user. */
3750 warn_about_unused_variables (vars);
3752 if (thisblock->exit_label)
3754 do_pending_stack_adjust ();
3755 emit_label (thisblock->exit_label);
3758 /* If necessary, make handlers for nonlocal gotos taking
3759 place in the function calls in this block. */
3760 if (function_call_count != 0 && nonlocal_labels
3761 /* Make handler for outermost block
3762 if there were any nonlocal gotos to this function. */
3763 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3764 /* Make handler for inner block if it has something
3765 special to do when you jump out of it. */
3766 : (thisblock->data.block.cleanups != 0
3767 || thisblock->data.block.stack_level != 0)))
3768 expand_nl_goto_receivers (thisblock);
3770 /* Don't allow jumping into a block that has a stack level.
3771 Cleanups are allowed, though. */
3772 if (dont_jump_in > 0
3773 || (dont_jump_in == 0 && thisblock->data.block.stack_level != 0))
3775 struct label_chain *chain;
3777 /* Any labels in this block are no longer valid to go to.
3778 Mark them to cause an error message. */
3779 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3781 DECL_TOO_LATE (chain->label) = 1;
3782 /* If any goto without a fixup came to this label,
3783 that must be an error, because gotos without fixups
3784 come from outside all saved stack-levels. */
3785 if (TREE_ADDRESSABLE (chain->label))
3786 error ("%Jlabel '%D' used before containing binding contour",
3787 chain->label, chain->label);
3791 /* Restore stack level in effect before the block
3792 (only if variable-size objects allocated). */
3793 /* Perform any cleanups associated with the block. */
3795 if (thisblock->data.block.stack_level != 0
3796 || thisblock->data.block.cleanups != 0)
3798 int reachable;
3799 rtx insn;
3801 /* Don't let cleanups affect ({...}) constructs. */
3802 int old_expr_stmts_for_value = expr_stmts_for_value;
3803 rtx old_last_expr_value = last_expr_value;
3804 tree old_last_expr_type = last_expr_type;
3805 expr_stmts_for_value = 0;
3807 /* Only clean up here if this point can actually be reached. */
3808 insn = get_last_insn ();
3809 if (GET_CODE (insn) == NOTE)
3810 insn = prev_nonnote_insn (insn);
3811 reachable = (! insn || GET_CODE (insn) != BARRIER);
3813 /* Do the cleanups. */
3814 expand_cleanups (thisblock->data.block.cleanups, 0, reachable);
3815 if (reachable)
3816 do_pending_stack_adjust ();
3818 expr_stmts_for_value = old_expr_stmts_for_value;
3819 last_expr_value = old_last_expr_value;
3820 last_expr_type = old_last_expr_type;
3822 /* Restore the stack level. */
3824 if (reachable && thisblock->data.block.stack_level != 0)
3826 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3827 thisblock->data.block.stack_level, NULL_RTX);
3828 if (nonlocal_goto_handler_slots != 0)
3829 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3830 NULL_RTX);
3833 /* Any gotos out of this block must also do these things.
3834 Also report any gotos with fixups that came to labels in this
3835 level. */
3836 fixup_gotos (thisblock,
3837 thisblock->data.block.stack_level,
3838 thisblock->data.block.cleanups,
3839 thisblock->data.block.first_insn,
3840 dont_jump_in);
3843 /* Mark the beginning and end of the scope if requested.
3844 We do this now, after running cleanups on the variables
3845 just going out of scope, so they are in scope for their cleanups. */
3847 if (mark_ends)
3849 rtx note = emit_note (NOTE_INSN_BLOCK_END);
3850 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3852 else
3853 /* Get rid of the beginning-mark if we don't make an end-mark. */
3854 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3856 /* Restore the temporary level of TARGET_EXPRs. */
3857 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3859 /* Restore block_stack level for containing block. */
3861 stack_block_stack = thisblock->data.block.innermost_stack_block;
3862 POPSTACK (block_stack);
3864 /* Pop the stack slot nesting and free any slots at this level. */
3865 pop_temp_slots ();
3868 /* Generate code to save the stack pointer at the start of the current block
3869 and set up to restore it on exit. */
3871 void
3872 save_stack_pointer (void)
3874 struct nesting *thisblock = block_stack;
3876 if (thisblock->data.block.stack_level == 0)
3878 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3879 &thisblock->data.block.stack_level,
3880 thisblock->data.block.first_insn);
3881 stack_block_stack = thisblock;
3885 /* Generate RTL for the automatic variable declaration DECL.
3886 (Other kinds of declarations are simply ignored if seen here.) */
3888 void
3889 expand_decl (tree decl)
3891 tree type;
3893 type = TREE_TYPE (decl);
3895 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3896 type in case this node is used in a reference. */
3897 if (TREE_CODE (decl) == CONST_DECL)
3899 DECL_MODE (decl) = TYPE_MODE (type);
3900 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3901 DECL_SIZE (decl) = TYPE_SIZE (type);
3902 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3903 return;
3906 /* Otherwise, only automatic variables need any expansion done. Static and
3907 external variables, and external functions, will be handled by
3908 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3909 nothing. PARM_DECLs are handled in `assign_parms'. */
3910 if (TREE_CODE (decl) != VAR_DECL)
3911 return;
3913 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3914 return;
3916 /* Create the RTL representation for the variable. */
3918 if (type == error_mark_node)
3919 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3921 else if (DECL_SIZE (decl) == 0)
3922 /* Variable with incomplete type. */
3924 rtx x;
3925 if (DECL_INITIAL (decl) == 0)
3926 /* Error message was already done; now avoid a crash. */
3927 x = gen_rtx_MEM (BLKmode, const0_rtx);
3928 else
3929 /* An initializer is going to decide the size of this array.
3930 Until we know the size, represent its address with a reg. */
3931 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3933 set_mem_attributes (x, decl, 1);
3934 SET_DECL_RTL (decl, x);
3936 else if (DECL_MODE (decl) != BLKmode
3937 /* If -ffloat-store, don't put explicit float vars
3938 into regs. */
3939 && !(flag_float_store
3940 && TREE_CODE (type) == REAL_TYPE)
3941 && ! TREE_THIS_VOLATILE (decl)
3942 && ! DECL_NONLOCAL (decl)
3943 && (DECL_REGISTER (decl) || DECL_ARTIFICIAL (decl) || optimize))
3945 /* Automatic variable that can go in a register. */
3946 int unsignedp = TREE_UNSIGNED (type);
3947 enum machine_mode reg_mode
3948 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3950 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3952 if (!DECL_ARTIFICIAL (decl))
3953 mark_user_reg (DECL_RTL (decl));
3955 if (POINTER_TYPE_P (type))
3956 mark_reg_pointer (DECL_RTL (decl),
3957 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3959 maybe_set_unchanging (DECL_RTL (decl), decl);
3961 /* If something wants our address, try to use ADDRESSOF. */
3962 if (TREE_ADDRESSABLE (decl))
3963 put_var_into_stack (decl, /*rescan=*/false);
3966 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3967 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3968 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3969 STACK_CHECK_MAX_VAR_SIZE)))
3971 /* Variable of fixed size that goes on the stack. */
3972 rtx oldaddr = 0;
3973 rtx addr;
3974 rtx x;
3976 /* If we previously made RTL for this decl, it must be an array
3977 whose size was determined by the initializer.
3978 The old address was a register; set that register now
3979 to the proper address. */
3980 if (DECL_RTL_SET_P (decl))
3982 if (GET_CODE (DECL_RTL (decl)) != MEM
3983 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3984 abort ();
3985 oldaddr = XEXP (DECL_RTL (decl), 0);
3989 /* Set the alignment we actually gave this decl. */
3989 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3990 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3991 DECL_USER_ALIGN (decl) = 0;
3993 x = assign_temp (decl, 1, 1, 1);
3994 set_mem_attributes (x, decl, 1);
3995 SET_DECL_RTL (decl, x);
3997 if (oldaddr)
3999 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
4000 if (addr != oldaddr)
4001 emit_move_insn (oldaddr, addr);
4004 else
4005 /* Dynamic-size object: must push space on the stack. */
4007 rtx address, size, x;
4009 /* Record the stack pointer on entry to the block, if we have
4010 not already done so. */
4011 do_pending_stack_adjust ();
4012 save_stack_pointer ();
4014 /* In function-at-a-time mode, variable_size doesn't expand this,
4015 so do it now. */
4016 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
4017 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
4018 const0_rtx, VOIDmode, 0);
4020 /* Compute the variable's size, in bytes. */
4021 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
4022 free_temp_slots ();
4024 /* Allocate space on the stack for the variable. Note that
4025 DECL_ALIGN says how the variable is to be aligned and we
4026 cannot use it to conclude anything about the alignment of
4027 the size. */
4028 address = allocate_dynamic_stack_space (size, NULL_RTX,
4029 TYPE_ALIGN (TREE_TYPE (decl)));
4031 /* Reference the variable indirect through that rtx. */
4032 x = gen_rtx_MEM (DECL_MODE (decl), address);
4033 set_mem_attributes (x, decl, 1);
4034 SET_DECL_RTL (decl, x);
4037 /* Indicate the alignment we actually gave this variable. */
4038 #ifdef STACK_BOUNDARY
4039 DECL_ALIGN (decl) = STACK_BOUNDARY;
4040 #else
4041 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
4042 #endif
4043 DECL_USER_ALIGN (decl) = 0;
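/* For illustration, the three main paths above correspond to these
hypothetical declarations:

void g (int n)
{
int i; <- pseudo REG, when it may live in a register
char buf[32]; <- fixed-size stack slot via assign_temp
char vla[n]; <- allocate_dynamic_stack_space at run time
}

With -O0 and no `register' keyword, `i' instead takes the fixed-size
stack path, since the register case requires DECL_REGISTER,
DECL_ARTIFICIAL or `optimize'. */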
4047 /* Emit code to perform the initialization of a declaration DECL. */
4049 void
4050 expand_decl_init (tree decl)
4052 int was_used = TREE_USED (decl);
4054 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
4055 for static decls. */
4056 if (TREE_CODE (decl) == CONST_DECL
4057 || TREE_STATIC (decl))
4058 return;
4060 /* Compute and store the initial value now. */
4062 push_temp_slots ();
4064 if (DECL_INITIAL (decl) == error_mark_node)
4066 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
4068 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
4069 || code == POINTER_TYPE || code == REFERENCE_TYPE)
4070 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
4071 0);
4072 emit_queue ();
4074 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
4076 emit_line_note (DECL_SOURCE_LOCATION (decl));
4077 expand_assignment (decl, DECL_INITIAL (decl), 0);
4078 emit_queue ();
4081 /* Don't let the initialization count as "using" the variable. */
4082 TREE_USED (decl) = was_used;
4084 /* Free any temporaries we made while initializing the decl. */
4085 preserve_temp_slots (NULL_RTX);
4086 free_temp_slots ();
4087 pop_temp_slots ();
4090 /* CLEANUP is an expression to be executed at exit from this binding contour;
4091 for example, in C++, it might call the destructor for this variable.
4093 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4094 CLEANUP multiple times, and have the correct semantics. This
4095 happens in exception handling, for gotos, returns, breaks that
4096 leave the current scope.
4098 If CLEANUP is nonzero and DECL is zero, we record a cleanup
4099 that is not associated with any particular variable. */
4102 expand_decl_cleanup (tree decl, tree cleanup)
4104 struct nesting *thisblock;
4106 /* Error if we are not in any block. */
4107 if (cfun == 0 || block_stack == 0)
4108 return 0;
4110 thisblock = block_stack;
4112 /* Record the cleanup if there is one. */
4114 if (cleanup != 0)
4116 tree t;
4117 rtx seq;
4118 tree *cleanups = &thisblock->data.block.cleanups;
4119 int cond_context = conditional_context ();
4121 if (cond_context)
4123 rtx flag = gen_reg_rtx (word_mode);
4124 rtx set_flag_0;
4125 tree cond;
4127 start_sequence ();
4128 emit_move_insn (flag, const0_rtx);
4129 set_flag_0 = get_insns ();
4130 end_sequence ();
4132 thisblock->data.block.last_unconditional_cleanup
4133 = emit_insn_after (set_flag_0,
4134 thisblock->data.block.last_unconditional_cleanup);
4136 emit_move_insn (flag, const1_rtx);
4138 cond = build_decl (VAR_DECL, NULL_TREE,
4139 (*lang_hooks.types.type_for_mode) (word_mode, 1));
4140 SET_DECL_RTL (cond, flag);
4142 /* Conditionalize the cleanup. */
4143 cleanup = build (COND_EXPR, void_type_node,
4144 (*lang_hooks.truthvalue_conversion) (cond),
4145 cleanup, integer_zero_node);
4146 cleanup = fold (cleanup);
4148 cleanups = &thisblock->data.block.cleanups;
4151 cleanup = unsave_expr (cleanup);
4153 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4155 if (! cond_context)
4156 /* If this block has a cleanup, it belongs in stack_block_stack. */
4157 stack_block_stack = thisblock;
4159 if (cond_context)
4161 start_sequence ();
4164 if (! using_eh_for_cleanups_p)
4165 TREE_ADDRESSABLE (t) = 1;
4166 else
4167 expand_eh_region_start ();
4169 if (cond_context)
4171 seq = get_insns ();
4172 end_sequence ();
4173 if (seq)
4174 thisblock->data.block.last_unconditional_cleanup
4175 = emit_insn_after (seq,
4176 thisblock->data.block.last_unconditional_cleanup);
4178 else
4180 thisblock->data.block.last_unconditional_cleanup
4181 = get_last_insn ();
4182 /* When we insert instructions after the last unconditional cleanup,
4183 we don't adjust last_insn. That means that a later add_insn will
4184 clobber the instructions we've just added. The easiest way to
4185 fix this is to just insert another instruction here, so that the
4186 instructions inserted after the last unconditional cleanup are
4187 never the last instruction. */
4188 emit_note (NOTE_INSN_DELETED);
4191 return 1;
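/* To illustrate the conditional case above (a C++-flavored sketch;
the exact trees built differ): registering a destructor cleanup while
expanding the second arm of "b ? f (T ()) : 0" effectively yields

flag = 0; <- inserted at the last unconditional point
...
flag = 1; <- where the conditional code constructs T
...
if (flag) ~T (); <- the fold()ed COND_EXPR cleanup

so the cleanup runs only on paths that actually built the
temporary. */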
4194 /* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
4195 is thrown. */
4198 expand_decl_cleanup_eh (tree decl, tree cleanup, int eh_only)
4200 int ret = expand_decl_cleanup (decl, cleanup);
4201 if (cleanup && ret)
4203 tree node = block_stack->data.block.cleanups;
4204 CLEANUP_EH_ONLY (node) = eh_only;
4206 return ret;
4209 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4210 DECL_ELTS is the list of elements that belong to DECL's type.
4211 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4213 void
4214 expand_anon_union_decl (tree decl, tree cleanup, tree decl_elts)
4216 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4217 rtx x;
4218 tree t;
4220 /* If any of the elements are addressable, so is the entire union. */
4221 for (t = decl_elts; t; t = TREE_CHAIN (t))
4222 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4224 TREE_ADDRESSABLE (decl) = 1;
4225 break;
4228 expand_decl (decl);
4229 expand_decl_cleanup (decl, cleanup);
4230 x = DECL_RTL (decl);
4232 /* Go through the elements, assigning RTL to each. */
4233 for (t = decl_elts; t; t = TREE_CHAIN (t))
4235 tree decl_elt = TREE_VALUE (t);
4236 tree cleanup_elt = TREE_PURPOSE (t);
4237 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4239 /* If any of the elements are used, so is the entire
4240 union. */
4241 if (TREE_USED (decl_elt))
4242 TREE_USED (decl) = 1;
4244 /* Propagate the union's alignment to the elements. */
4245 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4246 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4248 /* If the element has BLKmode and the union doesn't, the union is
4249 aligned such that the element doesn't need to have BLKmode, so
4250 change the element's mode to the appropriate one for its size. */
4251 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4252 DECL_MODE (decl_elt) = mode
4253 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4255 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4256 instead create a new MEM rtx with the proper mode. */
4257 if (GET_CODE (x) == MEM)
4259 if (mode == GET_MODE (x))
4260 SET_DECL_RTL (decl_elt, x);
4261 else
4262 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
4264 else if (GET_CODE (x) == REG)
4266 if (mode == GET_MODE (x))
4267 SET_DECL_RTL (decl_elt, x);
4268 else
4269 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
4271 else
4272 abort ();
4274 /* Record the cleanup if there is one. */
4276 if (cleanup != 0)
4277 thisblock->data.block.cleanups
4278 = tree_cons (decl_elt, cleanup_elt,
4279 thisblock->data.block.cleanups);
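/* For illustration (hypothetical member names): for

union { int i; float f; };

DECL is the compiler-generated variable for the whole union and
DECL_ELTS lists the VAR_DECLs for `i' and `f'. Both elements end up
sharing DECL's REG or MEM, re-expressed in each element's own mode
(e.g. SImode and SFmode on a typical 32-bit target). */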
4283 /* Expand a list of cleanups LIST.
4284 Elements may be expressions or may be nested lists.
4286 If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
4287 goto and handle protection regions specially in that case.
4289 If REACHABLE, we emit code, otherwise just inform the exception handling
4290 code about this finalization. */
4292 static void
4293 expand_cleanups (tree list, int in_fixup, int reachable)
4295 tree tail;
4296 for (tail = list; tail; tail = TREE_CHAIN (tail))
4297 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4298 expand_cleanups (TREE_VALUE (tail), in_fixup, reachable);
4299 else
4301 if (! in_fixup && using_eh_for_cleanups_p)
4302 expand_eh_region_end_cleanup (TREE_VALUE (tail));
4304 if (reachable && !CLEANUP_EH_ONLY (tail))
4306 /* Cleanups may be run multiple times. For example,
4307 when exiting a binding contour, we expand the
4308 cleanups associated with that contour. When a goto
4309 within that binding contour has a target outside that
4310 contour, it will expand all cleanups from its scope to
4311 the target. Though the cleanups are expanded multiple
4312 times, the control paths are non-overlapping so the
4313 cleanups will not be executed twice. */
4315 /* We may need to protect from outer cleanups. */
4316 if (in_fixup && using_eh_for_cleanups_p)
4318 expand_eh_region_start ();
4320 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4322 expand_eh_region_end_fixup (TREE_VALUE (tail));
4324 else
4325 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4327 free_temp_slots ();
4332 /* Mark the context we are emitting RTL for as a conditional
4333 context, so that any cleanup actions we register with
4334 expand_decl_cleanup will be properly conditionalized when those
4335 cleanup actions are later performed. Must be called before any
4336 expression (tree) is expanded that is within a conditional context. */
4338 void
4339 start_cleanup_deferral (void)
4341 /* block_stack can be NULL if we are inside the parameter list. It is
4342 OK to do nothing, because cleanups aren't possible here. */
4343 if (block_stack)
4344 ++block_stack->data.block.conditional_code;
4347 /* Mark the end of a conditional region of code. Because cleanup
4348 deferrals may be nested, we may still be in a conditional region
4349 after we end the currently deferred cleanups; only after we end all
4350 deferred cleanups are we back in unconditional code. */
4352 void
4353 end_cleanup_deferral (void)
4355 /* block_stack can be NULL if we are inside the parameter list. It is
4356 OK to do nothing, because cleanups aren't possible here. */
4357 if (block_stack)
4358 --block_stack->data.block.conditional_code;
4361 tree
4362 last_cleanup_this_contour (void)
4364 if (block_stack == 0)
4365 return 0;
4367 return block_stack->data.block.cleanups;
4370 /* Return 1 if there are any pending cleanups at this point.
4371 Check the current contour as well as contours that enclose
4372 the current contour. */
4375 any_pending_cleanups (void)
4377 struct nesting *block;
4379 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4380 return 0;
4382 if (block_stack->data.block.cleanups != NULL)
4383 return 1;
4385 if (block_stack->data.block.outer_cleanups == 0)
4386 return 0;
4388 for (block = block_stack->next; block; block = block->next)
4389 if (block->data.block.cleanups != 0)
4390 return 1;
4392 return 0;
4395 /* Enter a case (Pascal) or switch (C) statement.
4396 Push a block onto case_stack and nesting_stack
4397 to accumulate the case-labels that are seen
4398 and to record the labels generated for the statement.
4400 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4401 Otherwise, this construct is transparent for `exit_something'.
4403 EXPR is the index-expression to be dispatched on.
4404 TYPE is its nominal type. We could simply convert EXPR to this type,
4405 but instead we take short cuts. */
4407 void
4408 expand_start_case (int exit_flag, tree expr, tree type,
4409 const char *printname)
4411 struct nesting *thiscase = ALLOC_NESTING ();
4413 /* Make an entry on case_stack for the case we are entering. */
4415 thiscase->desc = CASE_NESTING;
4416 thiscase->next = case_stack;
4417 thiscase->all = nesting_stack;
4418 thiscase->depth = ++nesting_depth;
4419 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4420 thiscase->data.case_stmt.case_list = 0;
4421 thiscase->data.case_stmt.index_expr = expr;
4422 thiscase->data.case_stmt.nominal_type = type;
4423 thiscase->data.case_stmt.default_label = 0;
4424 thiscase->data.case_stmt.printname = printname;
4425 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4426 case_stack = thiscase;
4427 nesting_stack = thiscase;
4429 do_pending_stack_adjust ();
4430 emit_queue ();
4432 /* Make sure case_stmt.start points to something that won't
4433 need any transformation before expand_end_case. */
4434 if (GET_CODE (get_last_insn ()) != NOTE)
4435 emit_note (NOTE_INSN_DELETED);
4437 thiscase->data.case_stmt.start = get_last_insn ();
4439 start_cleanup_deferral ();
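/* A front end typically drives these routines roughly like this
(an illustrative sketch; real callers differ in detail):

expand_start_case (1, index_expr, index_type, "switch statement");
for each parsed label:
pushcase (value, converter, label_decl, &duplicate);
... expand the statements of the switch body ...
expand_end_case_type (index_expr, original_index_type);

with range labels going through pushcase_range instead. */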
4442 /* Start a "dummy case statement" within which case labels are invalid
4443 and are not connected to any larger real case statement.
4444 This can be used if you don't want to let a case statement jump
4445 into the middle of certain kinds of constructs. */
4447 void
4448 expand_start_case_dummy (void)
4450 struct nesting *thiscase = ALLOC_NESTING ();
4452 /* Make an entry on case_stack for the dummy. */
4454 thiscase->desc = CASE_NESTING;
4455 thiscase->next = case_stack;
4456 thiscase->all = nesting_stack;
4457 thiscase->depth = ++nesting_depth;
4458 thiscase->exit_label = 0;
4459 thiscase->data.case_stmt.case_list = 0;
4460 thiscase->data.case_stmt.start = 0;
4461 thiscase->data.case_stmt.nominal_type = 0;
4462 thiscase->data.case_stmt.default_label = 0;
4463 case_stack = thiscase;
4464 nesting_stack = thiscase;
4465 start_cleanup_deferral ();
4468 static void
4469 check_seenlabel (void)
4471 /* If this is the first label, warn if any insns have been emitted. */
4472 if (case_stack->data.case_stmt.line_number_status >= 0)
4474 rtx insn;
4476 restore_line_number_status
4477 (case_stack->data.case_stmt.line_number_status);
4478 case_stack->data.case_stmt.line_number_status = -1;
4480 for (insn = case_stack->data.case_stmt.start;
4481 insn;
4482 insn = NEXT_INSN (insn))
4484 if (GET_CODE (insn) == CODE_LABEL)
4485 break;
4486 if (GET_CODE (insn) != NOTE
4487 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4489 do
4490 insn = PREV_INSN (insn);
4491 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4493 /* If insn is zero, then there must have been a syntax error. */
4494 if (insn)
4496 location_t locus;
4497 locus.file = NOTE_SOURCE_FILE (insn);
4498 locus.line = NOTE_LINE_NUMBER (insn);
4499 warning ("%Hunreachable code at beginning of %s", &locus,
4500 case_stack->data.case_stmt.printname);
4502 break;
4508 /* Accumulate one case or default label inside a case or switch statement.
4509 VALUE is the value of the case (a null pointer, for a default label).
4510 The function CONVERTER, when applied to arguments T and V,
4511 converts the value V to the type T.
4513 If not currently inside a case or switch statement, return 1 and do
4514 nothing. The caller will print a language-specific error message.
4515 If VALUE is a duplicate or overlaps, return 2 and do nothing
4516 except store the (first) duplicate node in *DUPLICATE.
4517 If VALUE is out of range, return 3 and do nothing.
4518 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4519 Return 0 on success.
4521 Extended to handle range statements. */
4524 pushcase (tree value, tree (*converter) (tree, tree), tree label,
4525 tree *duplicate)
4527 tree index_type;
4528 tree nominal_type;
4530 /* Fail if not inside a real case statement. */
4531 if (! (case_stack && case_stack->data.case_stmt.start))
4532 return 1;
4534 if (stack_block_stack
4535 && stack_block_stack->depth > case_stack->depth)
4536 return 5;
4538 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4539 nominal_type = case_stack->data.case_stmt.nominal_type;
4541 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4542 if (index_type == error_mark_node)
4543 return 0;
4545 /* Convert VALUE to the type in which the comparisons are nominally done. */
4546 if (value != 0)
4547 value = (*converter) (nominal_type, value);
4549 check_seenlabel ();
4551 /* Fail if this value is out of range for the actual type of the index
4552 (which may be narrower than NOMINAL_TYPE). */
4553 if (value != 0
4554 && (TREE_CONSTANT_OVERFLOW (value)
4555 || ! int_fits_type_p (value, index_type)))
4556 return 3;
4558 return add_case_node (value, value, label, duplicate);
4561 /* Like pushcase but this case applies to all values between VALUE1 and
4562 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4563 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4564 starts at VALUE1 and ends at the highest value of the index type.
4565 If both are NULL, this case applies to all values.
4567 The return value is the same as that of pushcase but there is one
4568 additional error code: 4 means the specified range was empty. */
4571 pushcase_range (tree value1, tree value2, tree (*converter) (tree, tree),
4572 tree label, tree *duplicate)
4574 tree index_type;
4575 tree nominal_type;
4577 /* Fail if not inside a real case statement. */
4578 if (! (case_stack && case_stack->data.case_stmt.start))
4579 return 1;
4581 if (stack_block_stack
4582 && stack_block_stack->depth > case_stack->depth)
4583 return 5;
4585 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4586 nominal_type = case_stack->data.case_stmt.nominal_type;
4588 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4589 if (index_type == error_mark_node)
4590 return 0;
4592 check_seenlabel ();
4594 /* Convert VALUEs to the type in which the comparisons are nominally done
4595 and replace any unspecified value with the corresponding bound. */
4596 if (value1 == 0)
4597 value1 = TYPE_MIN_VALUE (index_type);
4598 if (value2 == 0)
4599 value2 = TYPE_MAX_VALUE (index_type);
4601 /* Fail if the range is empty. Do this before any conversion since
4602 we want to allow out-of-range empty ranges. */
4603 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4604 return 4;
4606 /* If the max was unbounded, use the max of the nominal_type we are
4607 converting to. Do this after the < check above to suppress false
4608 positives. */
4609 if (value2 == 0)
4610 value2 = TYPE_MAX_VALUE (nominal_type);
4612 value1 = (*converter) (nominal_type, value1);
4613 value2 = (*converter) (nominal_type, value2);
4615 /* Fail if these values are out of range. */
4616 if (TREE_CONSTANT_OVERFLOW (value1)
4617 || ! int_fits_type_p (value1, index_type))
4618 return 3;
4620 if (TREE_CONSTANT_OVERFLOW (value2)
4621 || ! int_fits_type_p (value2, index_type))
4622 return 3;
4624 return add_case_node (value1, value2, label, duplicate);
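/* For example, a GNU C range label "case 1 ... 5:" reaches this
function with VALUE1 = 1 and VALUE2 = 5, while a reversed range
such as "case 5 ... 1:" is rejected with the empty-range code 4
before any conversion is attempted. */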
4627 /* Do the actual insertion of a case label for pushcase and pushcase_range
4628 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4629 slowdown for large switch statements. */
4632 add_case_node (tree low, tree high, tree label, tree *duplicate)
4634 struct case_node *p, **q, *r;
4636 /* If there's no HIGH value, then this is not a case range; it's
4637 just a simple case label. But that's just a degenerate case
4638 range. */
4639 if (!high)
4640 high = low;
4642 /* Handle default labels specially. */
4643 if (!high && !low)
4645 if (case_stack->data.case_stmt.default_label != 0)
4647 *duplicate = case_stack->data.case_stmt.default_label;
4648 return 2;
4650 case_stack->data.case_stmt.default_label = label;
4651 expand_label (label);
4652 return 0;
4655 q = &case_stack->data.case_stmt.case_list;
4656 p = *q;
4658 while ((r = *q))
4660 p = r;
4662 /* Keep going past elements distinctly greater than HIGH. */
4663 if (tree_int_cst_lt (high, p->low))
4664 q = &p->left;
4666 /* or distinctly less than LOW. */
4667 else if (tree_int_cst_lt (p->high, low))
4668 q = &p->right;
4670 else
4672 /* We have an overlap; this is an error. */
4673 *duplicate = p->code_label;
4674 return 2;
4678 /* Add this label to the chain, and succeed. */
4680 r = ggc_alloc (sizeof (struct case_node));
4681 r->low = low;
4683 /* If the bounds are equal, turn this into the one-value case. */
4684 if (tree_int_cst_equal (low, high))
4685 r->high = r->low;
4686 else
4687 r->high = high;
4689 r->code_label = label;
4690 expand_label (label);
4692 *q = r;
4693 r->parent = p;
4694 r->left = 0;
4695 r->right = 0;
4696 r->balance = 0;
4698 while (p)
4700 struct case_node *s;
4702 if (r == p->left)
4704 int b;
4706 if (! (b = p->balance))
4707 /* Growth propagation from left side. */
4708 p->balance = -1;
4709 else if (b < 0)
4711 if (r->balance < 0)
4713 /* R-Rotation */
4714 if ((p->left = s = r->right))
4715 s->parent = p;
4717 r->right = p;
4718 p->balance = 0;
4719 r->balance = 0;
4720 s = p->parent;
4721 p->parent = r;
4723 if ((r->parent = s))
4725 if (s->left == p)
4726 s->left = r;
4727 else
4728 s->right = r;
4730 else
4731 case_stack->data.case_stmt.case_list = r;
4733 else
4734 /* r->balance == +1 */
4736 /* LR-Rotation */
4738 int b2;
4739 struct case_node *t = r->right;
4741 if ((p->left = s = t->right))
4742 s->parent = p;
4744 t->right = p;
4745 if ((r->right = s = t->left))
4746 s->parent = r;
4748 t->left = r;
4749 b = t->balance;
4750 b2 = b < 0;
4751 p->balance = b2;
4752 b2 = -b2 - b;
4753 r->balance = b2;
4754 t->balance = 0;
4755 s = p->parent;
4756 p->parent = t;
4757 r->parent = t;
4759 if ((t->parent = s))
4761 if (s->left == p)
4762 s->left = t;
4763 else
4764 s->right = t;
4766 else
4767 case_stack->data.case_stmt.case_list = t;
4769 break;
4772 else
4774 /* p->balance == +1; growth of left side balances the node. */
4775 p->balance = 0;
4776 break;
4779 else
4780 /* r == p->right */
4782 int b;
4784 if (! (b = p->balance))
4785 /* Growth propagation from right side. */
4786 p->balance++;
4787 else if (b > 0)
4789 if (r->balance > 0)
4791 /* L-Rotation */
4793 if ((p->right = s = r->left))
4794 s->parent = p;
4796 r->left = p;
4797 p->balance = 0;
4798 r->balance = 0;
4799 s = p->parent;
4800 p->parent = r;
4801 if ((r->parent = s))
4803 if (s->left == p)
4804 s->left = r;
4805 else
4806 s->right = r;
4809 else
4810 case_stack->data.case_stmt.case_list = r;
4813 else
4814 /* r->balance == -1 */
4816 /* RL-Rotation */
4817 int b2;
4818 struct case_node *t = r->left;
4820 if ((p->right = s = t->left))
4821 s->parent = p;
4823 t->left = p;
4825 if ((r->left = s = t->right))
4826 s->parent = r;
4828 t->right = r;
4829 b = t->balance;
4830 b2 = b < 0;
4831 r->balance = b2;
4832 b2 = -b2 - b;
4833 p->balance = b2;
4834 t->balance = 0;
4835 s = p->parent;
4836 p->parent = t;
4837 r->parent = t;
4839 if ((t->parent = s))
4841 if (s->left == p)
4842 s->left = t;
4843 else
4844 s->right = t;
4847 else
4848 case_stack->data.case_stmt.case_list = t;
4850 break;
4852 else
4854 /* p->balance == -1; growth of right side balances the node. */
4855 p->balance = 0;
4856 break;
4860 r = p;
4861 p = p->parent;
4864 return 0;
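/* The rebalancing above uses the standard AVL rotations. For
example, the single R-rotation turns p(r(A,B),C) into r(A,p(B,C))
when both P and R lean left; the LR- and RL-rotations are the
corresponding double rotations, which first lift T, the inner
grandchild, and then rotate at P. */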
4867 /* Returns the number of possible values of TYPE.
4868 Returns -1 if the number is unknown, variable, or if the number does not
4869 fit in a HOST_WIDE_INT.
4870 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4871 do not increase monotonically (there may be duplicates);
4872 to 1 if the values increase monotonically, but not always by 1;
4873 otherwise sets it to 0. */
4875 HOST_WIDE_INT
4876 all_cases_count (tree type, int *sparseness)
4878 tree t;
4879 HOST_WIDE_INT count, minval, lastval;
4881 *sparseness = 0;
4883 switch (TREE_CODE (type))
4885 case BOOLEAN_TYPE:
4886 count = 2;
4887 break;
4889 case CHAR_TYPE:
4890 count = 1 << BITS_PER_UNIT;
4891 break;
4893 default:
4894 case INTEGER_TYPE:
4895 if (TYPE_MAX_VALUE (type) != 0
4896 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4897 TYPE_MIN_VALUE (type))))
4898 && 0 != (t = fold (build (PLUS_EXPR, type, t,
4899 convert (type, integer_zero_node))))
4900 && host_integerp (t, 1))
4901 count = tree_low_cst (t, 1);
4902 else
4903 return -1;
4904 break;
4906 case ENUMERAL_TYPE:
4907 /* Don't waste time with enumeral types with huge values. */
4908 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4909 || TYPE_MAX_VALUE (type) == 0
4910 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4911 return -1;
4913 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
4914 count = 0;
4916 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4918 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4920 if (*sparseness == 2 || thisval <= lastval)
4921 *sparseness = 2;
4922 else if (thisval != minval + count)
4923 *sparseness = 1;
4925 lastval = thisval;
4926 count++;
4930 return count;
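/* Per the contract above (illustrative enums): enum {A, B, C}
should yield count 3 with *SPARSENESS 0; enum {A = 0, B = 4}
yields count 2 with *SPARSENESS 1, since the values increase but
not always by 1; and an enum with a duplicated value, such as
enum {A = 1, B = 1}, yields *SPARSENESS 2. */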
4933 #define BITARRAY_TEST(ARRAY, INDEX) \
4934 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4935 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4936 #define BITARRAY_SET(ARRAY, INDEX) \
4937 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4938 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
4940 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4941 with the case values we have seen, assuming the case expression
4942 has the given TYPE.
4943 SPARSENESS is as determined by all_cases_count.
4945 The time needed is proportional to COUNT, unless
4946 SPARSENESS is 2, in which case quadratic time is needed. */
4948 void
4949 mark_seen_cases (tree type, unsigned char *cases_seen, HOST_WIDE_INT count,
4950 int sparseness)
4952 tree next_node_to_try = NULL_TREE;
4953 HOST_WIDE_INT next_node_offset = 0;
4955 struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4956 tree val = make_node (INTEGER_CST);
4958 TREE_TYPE (val) = type;
4959 if (! root)
4960 /* Do nothing. */
4962 else if (sparseness == 2)
4964 tree t;
4965 unsigned HOST_WIDE_INT xlo;
4967 /* This less efficient loop is only needed to handle
4968 duplicate case values (multiple enum constants
4969 with the same value). */
4970 TREE_TYPE (val) = TREE_TYPE (root->low);
4971 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4972 t = TREE_CHAIN (t), xlo++)
4974 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4975 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4976 n = root;
4979 /* Keep going past elements distinctly greater than VAL. */
4980 if (tree_int_cst_lt (val, n->low))
4981 n = n->left;
4983 /* or distinctly less than VAL. */
4984 else if (tree_int_cst_lt (n->high, val))
4985 n = n->right;
4987 else
4989 /* We have found a matching range. */
4990 BITARRAY_SET (cases_seen, xlo);
4991 break;
4994 while (n);
4997 else
4999 if (root->left)
5000 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5002 for (n = root; n; n = n->right)
5004 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5005 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5006 while (! tree_int_cst_lt (n->high, val))
5008 /* Calculate (into xlo) the "offset" of the integer (val).
5009 The element with lowest value has offset 0, the next smallest
5010 element has offset 1, etc. */
5012 unsigned HOST_WIDE_INT xlo;
5013 HOST_WIDE_INT xhi;
5014 tree t;
5016 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5018 /* The TYPE_VALUES will be in increasing order, so
5019 start searching where we last ended. */
5020 t = next_node_to_try;
5021 xlo = next_node_offset;
5022 xhi = 0;
5023 for (;;)
5025 if (t == NULL_TREE)
5027 t = TYPE_VALUES (type);
5028 xlo = 0;
5030 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5032 next_node_to_try = TREE_CHAIN (t);
5033 next_node_offset = xlo + 1;
5034 break;
5036 xlo++;
5037 t = TREE_CHAIN (t);
5038 if (t == next_node_to_try)
5040 xlo = -1;
5041 break;
5045 else
5047 t = TYPE_MIN_VALUE (type);
5048 if (t)
5049 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5050 &xlo, &xhi);
5051 else
5052 xlo = xhi = 0;
5053 add_double (xlo, xhi,
5054 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5055 &xlo, &xhi);
5058 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
5059 BITARRAY_SET (cases_seen, xlo);
5061 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5062 1, 0,
5063 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5069 /* Given a switch statement with an expression that is an enumeration
5070 type, warn if any of the enumeration type's literals are not
5071 covered by the case expressions of the switch. Also, warn if there
5072 are any extra switch cases that are *not* elements of the
5073 enumerated type.
5075 Historical note:
5077 At one stage this function would: ``If all enumeration literals
5078 were covered by the case expressions, turn one of the expressions
5079 into the default expression since it should not be possible to fall
5080 through such a switch.''
5082 That code has since been removed as: ``This optimization is
5083 disabled because it causes valid programs to fail. ANSI C does not
5084 guarantee that an expression with enum type will have a value that
5085 is the same as one of the enumeration literals.'' */
5087 void
5088 check_for_full_enumeration_handling (tree type)
5090 struct case_node *n;
5091 tree chain;
5093 /* True iff the selector type is a numbered set mode. */
5094 int sparseness = 0;
5096 /* The number of possible selector values. */
5097 HOST_WIDE_INT size;
5099 /* For each possible selector value, a one iff it has been matched
5100 by a case value alternative. */
5101 unsigned char *cases_seen;
5103 /* The allocated size of cases_seen, in chars. */
5104 HOST_WIDE_INT bytes_needed;
5106 size = all_cases_count (type, &sparseness);
5107 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5109 if (size > 0 && size < 600000
5110 /* We deliberately use calloc here, not xcalloc, so that we can suppress
5111 this optimization if we don't have enough memory rather than
5112 aborting, as xmalloc would do. */
5113 && (cases_seen = really_call_calloc (bytes_needed, 1)) != NULL)
5115 HOST_WIDE_INT i;
5116 tree v = TYPE_VALUES (type);
5118 /* The time complexity of this code is normally O(N), where
5119 N is the number of members in the enumerated type.
5120 However, if type is an ENUMERAL_TYPE whose values do not
5121 increase monotonically, O(N*log(N)) time may be needed. */
5123 mark_seen_cases (type, cases_seen, size, sparseness);
5125 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5126 if (BITARRAY_TEST (cases_seen, i) == 0)
5127 warning ("enumeration value `%s' not handled in switch",
5128 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5130 free (cases_seen);
5133 /* Now we go the other way around; we warn if there are case
5134 expressions that don't correspond to enumerators. This can
5135 occur since C and C++ don't enforce type-checking of
5136 assignments to enumeration variables. */
5138 if (case_stack->data.case_stmt.case_list
5139 && case_stack->data.case_stmt.case_list->left)
5140 case_stack->data.case_stmt.case_list
5141 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5142 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5144 for (chain = TYPE_VALUES (type);
5145 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5146 chain = TREE_CHAIN (chain))
5149 if (!chain)
5151 if (TYPE_NAME (type) == 0)
5152 warning ("case value `%ld' not in enumerated type",
5153 (long) TREE_INT_CST_LOW (n->low));
5154 else
5155 warning ("case value `%ld' not in enumerated type `%s'",
5156 (long) TREE_INT_CST_LOW (n->low),
5157 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5158 == IDENTIFIER_NODE)
5159 ? TYPE_NAME (type)
5160 : DECL_NAME (TYPE_NAME (type))));
5162 if (!tree_int_cst_equal (n->low, n->high))
5164 for (chain = TYPE_VALUES (type);
5165 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5166 chain = TREE_CHAIN (chain))
5169 if (!chain)
5171 if (TYPE_NAME (type) == 0)
5172 warning ("case value `%ld' not in enumerated type",
5173 (long) TREE_INT_CST_LOW (n->high));
5174 else
5175 warning ("case value `%ld' not in enumerated type `%s'",
5176 (long) TREE_INT_CST_LOW (n->high),
5177 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5178 == IDENTIFIER_NODE)
5179 ? TYPE_NAME (type)
5180 : DECL_NAME (TYPE_NAME (type))));
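/* For example, given enum color {RED, GREEN, BLUE} and a switch
with only "case RED:" and "case GREEN:" labels, compiling with
-Wswitch and no default label warns that `BLUE' is not handled;
conversely a label such as "case 7:" draws the "not in enumerated
type" warning above. */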
5187 /* Maximum number of case bit tests. */
5188 #define MAX_CASE_BIT_TESTS 3
5190 /* By default, enable case bit tests on targets with ashlsi3. */
5191 #ifndef CASE_USE_BIT_TESTS
5192 #define CASE_USE_BIT_TESTS (ashl_optab->handlers[word_mode].insn_code \
5193 != CODE_FOR_nothing)
5194 #endif
5197 /* A case_bit_test represents a set of case nodes that may be
5198 selected from using a bit-wise comparison. HI and LO hold
5199 the integer to be tested against, LABEL contains the label
5200 to jump to upon success and BITS counts the number of case
5201 nodes handled by this test, typically the number of bits
5202 set in HI:LO. */
5204 struct case_bit_test
5206 HOST_WIDE_INT hi;
5207 HOST_WIDE_INT lo;
5208 rtx label;
5209 int bits;
5212 /* Determine whether "1 << x" is relatively cheap in word_mode. */
5214 static bool
5215 lshift_cheap_p (void)
5217 static bool init = false;
5218 static bool cheap = true;
5220 if (!init)
5222 rtx reg = gen_rtx_REG (word_mode, 10000);
5223 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
5224 cheap = cost < COSTS_N_INSNS (3);
5225 init = true;
5228 return cheap;
5231 /* Comparison function for qsort to order bit tests by decreasing
5232 number of case nodes, i.e. the node with the most cases gets
5233 tested first. */
5235 static int
5236 case_bit_test_cmp (const void *p1, const void *p2)
5238 const struct case_bit_test *d1 = p1;
5239 const struct case_bit_test *d2 = p2;
5241 return d2->bits - d1->bits;
5244 /* Expand a switch statement by a short sequence of bit-wise
5245 comparisons. "switch(x)" is effectively converted into
5246 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
5247 integer constants.
5249 INDEX_EXPR is the value being switched on, which is of
5250 type INDEX_TYPE. MINVAL is the lowest case value in
5251 the case nodes, of INDEX_TYPE type, and RANGE is highest
5252 value minus MINVAL, also of type INDEX_TYPE. NODES is
5253 the set of case nodes, and DEFAULT_LABEL is the label to
5254 branch to should none of the cases match.
5256 There *MUST* be MAX_CASE_BIT_TESTS or fewer unique case
5257 node targets. */
5259 static void
5260 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
5261 tree range, case_node_ptr nodes, rtx default_label)
5263 struct case_bit_test test[MAX_CASE_BIT_TESTS];
5264 enum machine_mode mode;
5265 rtx expr, index, label;
5266 unsigned int i,j,lo,hi;
5267 struct case_node *n;
5268 unsigned int count;
5270 count = 0;
5271 for (n = nodes; n; n = n->right)
5273 label = label_rtx (n->code_label);
5274 for (i = 0; i < count; i++)
5275 if (same_case_target_p (label, test[i].label))
5276 break;
5278 if (i == count)
5280 if (count >= MAX_CASE_BIT_TESTS)
5281 abort ();
5282 test[i].hi = 0;
5283 test[i].lo = 0;
5284 test[i].label = label;
5285 test[i].bits = 1;
5286 count++;
5288 else
5289 test[i].bits++;
5291 lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5292 n->low, minval)), 1);
5293 hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5294 n->high, minval)), 1);
5295 for (j = lo; j <= hi; j++)
5296 if (j >= HOST_BITS_PER_WIDE_INT)
5297 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_WIDE_INT);
5298 else
5299 test[i].lo |= (HOST_WIDE_INT) 1 << j;
5302 qsort (test, count, sizeof(*test), case_bit_test_cmp);
5304 index_expr = fold (build (MINUS_EXPR, index_type,
5305 convert (index_type, index_expr),
5306 convert (index_type, minval)));
5307 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5308 emit_queue ();
5309 index = protect_from_queue (index, 0);
5310 do_pending_stack_adjust ();
5312 mode = TYPE_MODE (index_type);
5313 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
5314 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
5315 default_label);
5317 index = convert_to_mode (word_mode, index, 0);
5318 index = expand_binop (word_mode, ashl_optab, const1_rtx,
5319 index, NULL_RTX, 1, OPTAB_WIDEN);
5321 for (i = 0; i < count; i++)
5323 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
5324 expr = expand_binop (word_mode, and_optab, index, expr,
5325 NULL_RTX, 1, OPTAB_WIDEN);
5326 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
5327 word_mode, 1, test[i].label);
5330 emit_jump (default_label);
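/* As a worked example (illustrative): for

switch (x) { case 0: case 2: case 30: foo (); break; }

all three cases share one target, so a single case_bit_test is
built with MINVAL 0, RANGE 30 and LO = (1<<0)|(1<<2)|(1<<30)
= 0x40000005, and the code above emits roughly

if ((unsigned) x > 30) goto default_label;
if (((HOST_WIDE_INT) 1 << x) & 0x40000005) goto foo_label;
goto default_label; */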
5333 /* Terminate a case (Pascal) or switch (C) statement
5334 in which ORIG_INDEX is the expression to be tested.
5335 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
5336 type as given in the source before any compiler conversions.
5337 Generate the code to test it and jump to the right place. */
5339 void
5340 expand_end_case_type (tree orig_index, tree orig_type)
5342 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
5343 rtx default_label = 0;
5344 struct case_node *n, *m;
5345 unsigned int count, uniq;
5346 rtx index;
5347 rtx table_label;
5348 int ncases;
5349 rtx *labelvec;
5350 int i;
5351 rtx before_case, end, lab;
5352 struct nesting *thiscase = case_stack;
5353 tree index_expr, index_type;
5354 bool exit_done = false;
5355 int unsignedp;
5357 /* Don't crash due to previous errors. */
5358 if (thiscase == NULL)
5359 return;
5361 index_expr = thiscase->data.case_stmt.index_expr;
5362 index_type = TREE_TYPE (index_expr);
5363 unsignedp = TREE_UNSIGNED (index_type);
5364 if (orig_type == NULL)
5365 orig_type = TREE_TYPE (orig_index);
5367 do_pending_stack_adjust ();
5369 /* This might get a spurious warning in the presence of a syntax error;
5370 it could be fixed by moving the call to check_seenlabel after the
5371 check for error_mark_node, and copying the code of check_seenlabel that
5372 deals with case_stack->data.case_stmt.line_number_status /
5373 restore_line_number_status in front of the call to end_cleanup_deferral.
5374 However, this might miss some useful warnings in the presence of
5375 non-syntax errors. */
5376 check_seenlabel ();
5378 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5379 if (index_type != error_mark_node)
5381 /* If the switch expression was an enumerated type, check that
5382 all enumeration literals are covered by the cases.
5383 The check is made when -Wswitch was specified and there is no
5384 default case, or when -Wswitch-enum was specified. */
5385 if (((warn_switch && !thiscase->data.case_stmt.default_label)
5386 || warn_switch_enum)
5387 && TREE_CODE (orig_type) == ENUMERAL_TYPE
5388 && TREE_CODE (index_expr) != INTEGER_CST)
5389 check_for_full_enumeration_handling (orig_type);
5391 if (warn_switch_default && !thiscase->data.case_stmt.default_label)
5392 warning ("switch missing default case");
5394 /* If we don't have a default-label, create one here,
5395 after the body of the switch. */
5396 if (thiscase->data.case_stmt.default_label == 0)
5398 thiscase->data.case_stmt.default_label
5399 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5400 /* Share the exit label if possible. */
5401 if (thiscase->exit_label)
5403 SET_DECL_RTL (thiscase->data.case_stmt.default_label,
5404 thiscase->exit_label);
5405 exit_done = true;
5407 expand_label (thiscase->data.case_stmt.default_label);
5409 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5411 before_case = get_last_insn ();
5413 if (thiscase->data.case_stmt.case_list
5414 && thiscase->data.case_stmt.case_list->left)
5415 thiscase->data.case_stmt.case_list
5416 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5418 /* Simplify the case-list before we count it. */
5419 group_case_nodes (thiscase->data.case_stmt.case_list);
5420 strip_default_case_nodes (&thiscase->data.case_stmt.case_list,
5421 default_label);
5423 /* Get upper and lower bounds of case values.
5424 Also convert all the case values to the index expr's data type. */
5426 uniq = 0;
5427 count = 0;
5428 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5430 /* Check low and high label values are integers. */
5431 if (TREE_CODE (n->low) != INTEGER_CST)
5432 abort ();
5433 if (TREE_CODE (n->high) != INTEGER_CST)
5434 abort ();
5436 n->low = convert (index_type, n->low);
5437 n->high = convert (index_type, n->high);
5439 /* Count the elements and track the largest and smallest
5440 of them (treating them as signed even if they are not). */
5441 if (count++ == 0)
5443 minval = n->low;
5444 maxval = n->high;
5446 else
5448 if (INT_CST_LT (n->low, minval))
5449 minval = n->low;
5450 if (INT_CST_LT (maxval, n->high))
5451 maxval = n->high;
5453 /* A range counts double, since it requires two compares. */
5454 if (! tree_int_cst_equal (n->low, n->high))
5455 count++;
5457 /* Count the number of unique case node targets. */
5458 uniq++;
5459 lab = label_rtx (n->code_label);
5460 for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
5461 if (same_case_target_p (label_rtx (m->code_label), lab))
5463 uniq--;
5464 break;
5468 /* Compute span of values. */
5469 if (count != 0)
5470 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5472 end_cleanup_deferral ();
5474 if (count == 0)
5476 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5477 emit_queue ();
5478 emit_jump (default_label);
5481 /* Try implementing this switch statement by a short sequence of
5482 bit-wise comparisons. However, we let the binary-tree case
5483 below handle constant index expressions. */
5484 else if (CASE_USE_BIT_TESTS
5485 && ! TREE_CONSTANT (index_expr)
5486 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
5487 && compare_tree_int (range, 0) > 0
5488 && lshift_cheap_p ()
5489 && ((uniq == 1 && count >= 3)
5490 || (uniq == 2 && count >= 5)
5491 || (uniq == 3 && count >= 6)))
5493 /* Optimize the case where all the case values fit in a
5494 word without having to subtract MINVAL; in that case,
5495 we can omit the subtraction entirely. */
5496 if (compare_tree_int (minval, 0) > 0
5497 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
5499 minval = integer_zero_node;
5500 range = maxval;
5502 emit_case_bit_tests (index_type, index_expr, minval, range,
5503 thiscase->data.case_stmt.case_list,
5504 default_label);
5507 /* If the range of values is much bigger than the number of values,
5508 make a sequence of conditional branches instead of a dispatch.
5509 If the switch-index is a constant, do it this way
5510 because we can optimize it. */
5512 else if (count < case_values_threshold ()
5513 || compare_tree_int (range,
5514 (optimize_size ? 3 : 10) * count) > 0
5515 /* RANGE may be signed, and really large ranges will show up
5516 as negative numbers. */
5517 || compare_tree_int (range, 0) < 0
5518 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5519 || flag_pic
5520 #endif
5521 || TREE_CONSTANT (index_expr))
5523 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5525 /* If the index is a short or char for which we do not have
5526 an insn to handle comparisons directly, convert it to
5527 a full integer now, rather than letting each comparison
5528 generate the conversion. */
5530 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5531 && ! have_insn_for (COMPARE, GET_MODE (index)))
5533 enum machine_mode wider_mode;
5534 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5535 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5536 if (have_insn_for (COMPARE, wider_mode))
5538 index = convert_to_mode (wider_mode, index, unsignedp);
5539 break;
5543 emit_queue ();
5544 do_pending_stack_adjust ();
5546 index = protect_from_queue (index, 0);
5547 if (GET_CODE (index) == MEM)
5548 index = copy_to_reg (index);
5549 if (GET_CODE (index) == CONST_INT
5550 || TREE_CODE (index_expr) == INTEGER_CST)
5552 /* Make a tree node with the proper constant value
5553 if we don't already have one. */
5554 if (TREE_CODE (index_expr) != INTEGER_CST)
5556 index_expr
5557 = build_int_2 (INTVAL (index),
5558 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5559 index_expr = convert (index_type, index_expr);
5562 /* For constant index expressions we need only
5563 issue an unconditional branch to the appropriate
5564 target code. The job of removing any unreachable
5565 code is left to the optimization phase if the
5566 "-O" option is specified. */
5567 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5568 if (! tree_int_cst_lt (index_expr, n->low)
5569 && ! tree_int_cst_lt (n->high, index_expr))
5570 break;
5572 if (n)
5573 emit_jump (label_rtx (n->code_label));
5574 else
5575 emit_jump (default_label);
5577 else
5579 /* If the index expression is not constant we generate
5580 a binary decision tree to select the appropriate
5581 target code. This is done as follows:
5583 The list of cases is rearranged into a binary tree,
5584 nearly optimal assuming equal probability for each case.
5586 The tree is transformed into RTL, eliminating
5587 redundant test conditions at the same time.
5589 If program flow could reach the end of the
5590 decision tree an unconditional jump to the
5591 default code is emitted. */
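           /* For instance, given the hypothetical case values
              1, 4, 7, 9 and 12, balancing picks 7 as the root; the
              emitted RTL first tests INDEX against 7, then decides
              between the subtree {1, 4} and the subtree {9, 12},
              so a matching case is reached by a short chain of
              compares rather than by scanning all five cases
              linearly.  */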
5593 use_cost_table
5594 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
5595 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5596 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
5597 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5598 default_label, index_type);
5599 emit_jump_if_reachable (default_label);
5602 else
5604 table_label = gen_label_rtx ();
5605 if (! try_casesi (index_type, index_expr, minval, range,
5606 table_label, default_label))
5608 index_type = thiscase->data.case_stmt.nominal_type;
5610 /* Index jumptables from zero for suitable values of
5611 minval to avoid a subtraction. */
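               /* E.g. if the case values are 1 .. 5, we can emit a
                  6-entry table indexed directly by the value, with
                  slot 0 holding the default label, instead of a
                  5-entry table that needs `index - 1' computed at
                  run time.  */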
5612 if (! optimize_size
5613 && compare_tree_int (minval, 0) > 0
5614 && compare_tree_int (minval, 3) < 0)
5616 minval = integer_zero_node;
5617 range = maxval;
5620 if (! try_tablejump (index_type, index_expr, minval, range,
5621 table_label, default_label))
5622 abort ();
5625 /* Get table of labels to jump to, in order of case index. */
5627 ncases = tree_low_cst (range, 0) + 1;
5628 labelvec = alloca (ncases * sizeof (rtx));
5629 memset (labelvec, 0, ncases * sizeof (rtx));
5631 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5633 /* Compute the low and high bounds relative to the minimum
5634 value since that should fit in a HOST_WIDE_INT while the
5635 actual values may not. */
5636 HOST_WIDE_INT i_low
5637 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5638 n->low, minval)), 1);
5639 HOST_WIDE_INT i_high
5640 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5641 n->high, minval)), 1);
5642 HOST_WIDE_INT i;
5644 for (i = i_low; i <= i_high; i ++)
5645 labelvec[i]
5646 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5649 /* Fill in the gaps with the default. */
5650 for (i = 0; i < ncases; i++)
5651 if (labelvec[i] == 0)
5652 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5654 /* Output the table. */
5655 emit_label (table_label);
5657 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5658 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5659 gen_rtx_LABEL_REF (Pmode, table_label),
5660 gen_rtvec_v (ncases, labelvec),
5661 const0_rtx, const0_rtx));
5662 else
5663 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5664 gen_rtvec_v (ncases, labelvec)));
5666 /* If the case insn drops through the table,
5667 after the table we must jump to the default-label.
5668 Otherwise record no drop-through after the table. */
5669 #ifdef CASE_DROPS_THROUGH
5670 emit_jump (default_label);
5671 #else
5672 emit_barrier ();
5673 #endif
5676 before_case = NEXT_INSN (before_case);
5677 end = get_last_insn ();
5678 if (squeeze_notes (&before_case, &end))
5679 abort ();
5680 reorder_insns (before_case, end,
5681 thiscase->data.case_stmt.start);
5683 else
5684 end_cleanup_deferral ();
5686 if (thiscase->exit_label && !exit_done)
5687 emit_label (thiscase->exit_label);
5689 POPSTACK (case_stack);
5691 free_temp_slots ();
5694 /* Convert the tree NODE into a list linked by the right field, with the left
5695 field zeroed. RIGHT is used for recursion; it is a list to be placed
5696 rightmost in the resulting list. */
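/* E.g. the tree

        4
       / \
      2   6

   becomes the list 2 -> 4 -> 6 (linked through the RIGHT fields),
   with the LEFT fields zeroed.  */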
5698 static struct case_node *
5699 case_tree2list (struct case_node *node, struct case_node *right)
5701 struct case_node *left;
5703 if (node->right)
5704 right = case_tree2list (node->right, right);
5706 node->right = right;
5707 if ((left = node->left))
5709 node->left = 0;
5710 return case_tree2list (left, node);
5713 return node;
5716 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5718 static void
5719 do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
5721 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
5723 if (op1 == op2)
5724 emit_jump (label);
5726 else
5727 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
5728 (GET_MODE (op1) == VOIDmode
5729 ? GET_MODE (op2) : GET_MODE (op1)),
5730 unsignedp, label);
5733 /* Not all case values are encountered equally. This function
5734 uses a heuristic to weight case labels, in cases where that
5735 looks like a reasonable thing to do.
5737 Right now, all we try to guess is text, and we establish the
5738 following weights:
5740 chars above space: 16
5741 digits: 16
5742 default: 12
5743 space, punct: 8
5744 tab: 4
5745 newline: 2
5746 other "\" chars: 1
5747 remaining chars: 0
5749 If we find any cases in the switch that are not either -1 or in the range
5750 of valid ASCII characters, or are control characters other than those
5751 commonly used with "\", don't treat this switch as scanning text.
5753 Return 1 if these nodes are suitable for cost estimation, otherwise
5754 return 0. */
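/* E.g. a hypothetical switch over the characters 'a', 'e' and ' '
   lies entirely within -1 .. 127 and contains no odd control
   characters, so it is treated as scanning text and its alphanumeric
   cases get the heaviest weight; a switch containing case 200 or
   case '\001' makes this function return 0, and no cost weighting
   is used.  */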
5756 static int
5757 estimate_case_costs (case_node_ptr node)
5759 tree min_ascii = integer_minus_one_node;
5760 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5761 case_node_ptr n;
5762 int i;
5764 /* If we haven't already made the cost table, make it now. Note that the
5765 lower bound of the table is -1, not zero. */
5767 if (! cost_table_initialized)
5769 cost_table_initialized = 1;
5771 for (i = 0; i < 128; i++)
5773 if (ISALNUM (i))
5774 COST_TABLE (i) = 16;
5775 else if (ISPUNCT (i))
5776 COST_TABLE (i) = 8;
5777 else if (ISCNTRL (i))
5778 COST_TABLE (i) = -1;
5781 COST_TABLE (' ') = 8;
5782 COST_TABLE ('\t') = 4;
5783 COST_TABLE ('\0') = 4;
5784 COST_TABLE ('\n') = 2;
5785 COST_TABLE ('\f') = 1;
5786 COST_TABLE ('\v') = 1;
5787 COST_TABLE ('\b') = 1;
5790 /* See if all the case expressions look like text. It is text if the
5791 lowest constant is >= -1 and the highest constant is <= 127. Do all comparisons
5792 as signed arithmetic since we don't want to ever access cost_table with a
5793 value less than -1. Also check that none of the constants in a range
5794 are strange control characters. */
5796 for (n = node; n; n = n->right)
5798 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5799 return 0;
5801 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5802 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5803 if (COST_TABLE (i) < 0)
5804 return 0;
5807 /* All interesting values are within the range of interesting
5808 ASCII characters. */
5809 return 1;
5812 /* Determine whether two case labels branch to the same target. */
5814 static bool
5815 same_case_target_p (rtx l1, rtx l2)
5817 rtx i1, i2;
5819 if (l1 == l2)
5820 return true;
5822 i1 = next_real_insn (l1);
5823 i2 = next_real_insn (l2);
5824 if (i1 == i2)
5825 return true;
5827 if (i1 && simplejump_p (i1))
5829 l1 = XEXP (SET_SRC (PATTERN (i1)), 0);
5832 if (i2 && simplejump_p (i2))
5834 l2 = XEXP (SET_SRC (PATTERN (i2)), 0);
5836 return l1 == l2;
5839 /* Delete nodes that branch to the default label from a list of
5840 case nodes. E.g. case 5: default: becomes just default: */
5842 static void
5843 strip_default_case_nodes (case_node_ptr *prev, rtx deflab)
5845 case_node_ptr ptr;
5847 while (*prev)
5849 ptr = *prev;
5850 if (same_case_target_p (label_rtx (ptr->code_label), deflab))
5851 *prev = ptr->right;
5852 else
5853 prev = &ptr->right;
5857 /* Scan an ordered list of case nodes
5858 combining those with consecutive values or ranges.
5860 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
5862 static void
5863 group_case_nodes (case_node_ptr head)
5865 case_node_ptr node = head;
5867 while (node)
5869 rtx lab = label_rtx (node->code_label);
5870 case_node_ptr np = node;
5872 /* Try to group the successors of NODE with NODE. */
5873 while (((np = np->right) != 0)
5874 /* Do they jump to the same place? */
5875 && same_case_target_p (label_rtx (np->code_label), lab)
5876 /* Are their ranges consecutive? */
5877 && tree_int_cst_equal (np->low,
5878 fold (build (PLUS_EXPR,
5879 TREE_TYPE (node->high),
5880 node->high,
5881 integer_one_node)))
5882 /* An overflow is not consecutive. */
5883 && tree_int_cst_lt (node->high,
5884 fold (build (PLUS_EXPR,
5885 TREE_TYPE (node->high),
5886 node->high,
5887 integer_one_node))))
5889 node->high = np->high;
5891 /* NP is the first node after NODE which can't be grouped with it.
5892 Delete the nodes in between, and move on to that node. */
5893 node->right = np;
5894 node = np;
5898 /* Take an ordered list of case nodes
5899 and transform them into a near optimal binary tree,
5900 on the assumption that any target code selection value is as
5901 likely as any other.
5903 The transformation is performed by splitting the ordered
5904 list into two equal sections plus a pivot. The parts are
5905 then attached to the pivot as left and right branches. Each
5906 branch is then transformed recursively. */
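/* E.g. an ordered list of the five single-value nodes 1, 3, 5, 8, 9
   is split at the pivot 5: the sublist {1, 3} becomes the left
   branch and {8, 9} the right branch, and each branch is then
   balanced recursively in the same way.  */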
5908 static void
5909 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
5911 case_node_ptr np;
5913 np = *head;
5914 if (np)
5916 int cost = 0;
5917 int i = 0;
5918 int ranges = 0;
5919 case_node_ptr *npp;
5920 case_node_ptr left;
5922 /* Count the number of entries on branch. Also count the ranges. */
5924 while (np)
5926 if (!tree_int_cst_equal (np->low, np->high))
5928 ranges++;
5929 if (use_cost_table)
5930 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
5933 if (use_cost_table)
5934 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
5936 i++;
5937 np = np->right;
5940 if (i > 2)
5942 /* Split this list if it is long enough for that to help. */
5943 npp = head;
5944 left = *npp;
5945 if (use_cost_table)
5947 /* Find the place in the list that bisects the list's total cost.
5948 Here i is set to half the total cost. */
5949 int n_moved = 0;
5950 i = (cost + 1) / 2;
5951 while (1)
5953 /* Skip nodes while their cost does not reach that amount. */
5954 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5955 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5956 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
5957 if (i <= 0)
5958 break;
5959 npp = &(*npp)->right;
5960 n_moved += 1;
5962 if (n_moved == 0)
5964 /* Leave this branch lopsided, but optimize left-hand
5965 side and fill in `parent' fields for right-hand side. */
5966 np = *head;
5967 np->parent = parent;
5968 balance_case_nodes (&np->left, np);
5969 for (; np->right; np = np->right)
5970 np->right->parent = np;
5971 return;
5974 /* If there are just three nodes, split at the middle one. */
5975 else if (i == 3)
5976 npp = &(*npp)->right;
5977 else
5979 /* Find the place in the list that bisects the list's total cost,
5980 where ranges count as 2.
5981 Here i is set to half the total cost. */
5982 i = (i + ranges + 1) / 2;
5983 while (1)
5985 /* Skip nodes while their cost does not reach that amount. */
5986 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5987 i--;
5988 i--;
5989 if (i <= 0)
5990 break;
5991 npp = &(*npp)->right;
5994 *head = np = *npp;
5995 *npp = 0;
5996 np->parent = parent;
5997 np->left = left;
5999 /* Optimize each of the two split parts. */
6000 balance_case_nodes (&np->left, np);
6001 balance_case_nodes (&np->right, np);
6003 else
6005 /* Else leave this branch as one level,
6006 but fill in `parent' fields. */
6007 np = *head;
6008 np->parent = parent;
6009 for (; np->right; np = np->right)
6010 np->right->parent = np;
6015 /* Search the parent sections of the case node tree
6016 to see if a test for the lower bound of NODE would be redundant.
6017 INDEX_TYPE is the type of the index expression.
6019 The instructions to generate the case decision tree are
6020 output in the same order as nodes are processed, so it is
6021 known that if a parent node checks for the current node's
6022 low value minus one, the current node is bounded at its
6023 lower end. Thus the test would be redundant. */
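/* E.g. if NODE covers the range 10 .. 12 and some parent node has a
   HIGH value of 9, then control can only reach NODE's code with
   INDEX >= 10, so no explicit lower-bound test is needed.  */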
6025 static int
6026 node_has_low_bound (case_node_ptr node, tree index_type)
6028 tree low_minus_one;
6029 case_node_ptr pnode;
6031 /* If the lower bound of this node is the lowest value in the index type,
6032 we need not test it. */
6034 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
6035 return 1;
6037 /* If this node has a left branch, the value at the left must be less
6038 than that at this node, so it cannot be bounded at the bottom and
6039 we need not bother testing any further. */
6041 if (node->left)
6042 return 0;
6044 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
6045 node->low, integer_one_node));
6047 /* If the subtraction above overflowed, we can't verify anything.
6048 Otherwise, look for a parent that tests our value - 1. */
6050 if (! tree_int_cst_lt (low_minus_one, node->low))
6051 return 0;
6053 for (pnode = node->parent; pnode; pnode = pnode->parent)
6054 if (tree_int_cst_equal (low_minus_one, pnode->high))
6055 return 1;
6057 return 0;
6060 /* Search the parent sections of the case node tree
6061 to see if a test for the upper bound of NODE would be redundant.
6062 INDEX_TYPE is the type of the index expression.
6064 The instructions to generate the case decision tree are
6065 output in the same order as nodes are processed, so it is
6066 known that if a parent node checks for the current node's
6067 high value plus one, the current node is bounded at its
6068 upper end. Thus the test would be redundant. */
6070 static int
6071 node_has_high_bound (case_node_ptr node, tree index_type)
6073 tree high_plus_one;
6074 case_node_ptr pnode;
6076 /* If there is no upper bound, obviously no test is needed. */
6078 if (TYPE_MAX_VALUE (index_type) == NULL)
6079 return 1;
6081 /* If the upper bound of this node is the highest value in the type
6082 of the index expression, we need not test against it. */
6084 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
6085 return 1;
6087 /* If this node has a right branch, the value at the right must be greater
6088 than that at this node, so it cannot be bounded at the top and
6089 we need not bother testing any further. */
6091 if (node->right)
6092 return 0;
6094 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
6095 node->high, integer_one_node));
6097 /* If the addition above overflowed, we can't verify anything.
6098 Otherwise, look for a parent that tests our value + 1. */
6100 if (! tree_int_cst_lt (node->high, high_plus_one))
6101 return 0;
6103 for (pnode = node->parent; pnode; pnode = pnode->parent)
6104 if (tree_int_cst_equal (high_plus_one, pnode->low))
6105 return 1;
6107 return 0;
6110 /* Search the parent sections of the
6111 case node tree to see if both tests for the upper and lower
6112 bounds of NODE would be redundant. */
6114 static int
6115 node_is_bounded (case_node_ptr node, tree index_type)
6117 return (node_has_low_bound (node, index_type)
6118 && node_has_high_bound (node, index_type));
6121 /* Emit an unconditional jump to LABEL unless it would be dead code. */
6123 static void
6124 emit_jump_if_reachable (rtx label)
6126 if (GET_CODE (get_last_insn ()) != BARRIER)
6127 emit_jump (label);
6130 /* Emit step-by-step code to select a case for the value of INDEX.
6131 The thus generated decision tree follows the form of the
6132 case-node binary tree NODE, whose nodes represent test conditions.
6133 INDEX_TYPE is the type of the index of the switch.
6135 Care is taken to prune redundant tests from the decision tree
6136 by detecting any boundary conditions already checked by
6137 emitted rtx. (See node_has_high_bound, node_has_low_bound
6138 and node_is_bounded, above.)
6140 Where the test conditions can be shown to be redundant we emit
6141 an unconditional jump to the target code. As a further
6142 optimization, the subordinates of a tree node are examined to
6143 check for bounded nodes. In this case the conditional and/or
6144 unconditional jumps emitted for the current node's boundary
6145 checks are arranged to target the subordinates' associated
6146 code directly for out-of-bound conditions on the current node.
6148 We can assume that when control reaches the code generated here,
6149 the index value has already been compared with the parents
6150 of this node, and determined to be on the same side of each parent
6151 as this node is. Thus, if this node tests for the value 51,
6152 and a parent tested for 52, we don't need to consider
6153 the possibility of a value greater than 51. If another parent
6154 tests for the value 50, then this node need not test anything. */
6156 static void
6157 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
6158 tree index_type)
6160 /* If INDEX has an unsigned type, we must make unsigned branches. */
6161 int unsignedp = TREE_UNSIGNED (index_type);
6162 enum machine_mode mode = GET_MODE (index);
6163 enum machine_mode imode = TYPE_MODE (index_type);
6165 /* See if our parents have already tested everything for us.
6166 If they have, emit an unconditional jump for this node. */
6167 if (node_is_bounded (node, index_type))
6168 emit_jump (label_rtx (node->code_label));
6170 else if (tree_int_cst_equal (node->low, node->high))
6172 /* Node is single valued. First see if the index expression matches
6173 this node and then check our children, if any. */
6175 do_jump_if_equal (index,
6176 convert_modes (mode, imode,
6177 expand_expr (node->low, NULL_RTX,
6178 VOIDmode, 0),
6179 unsignedp),
6180 label_rtx (node->code_label), unsignedp);
6182 if (node->right != 0 && node->left != 0)
6184 /* This node has children on both sides.
6185 Dispatch to one side or the other
6186 by comparing the index value with this node's value.
6187 If one subtree is bounded, check that one first,
6188 so we can avoid real branches in the tree. */
6190 if (node_is_bounded (node->right, index_type))
6192 emit_cmp_and_jump_insns (index,
6193 convert_modes
6194 (mode, imode,
6195 expand_expr (node->high, NULL_RTX,
6196 VOIDmode, 0),
6197 unsignedp),
6198 GT, NULL_RTX, mode, unsignedp,
6199 label_rtx (node->right->code_label));
6200 emit_case_nodes (index, node->left, default_label, index_type);
6203 else if (node_is_bounded (node->left, index_type))
6205 emit_cmp_and_jump_insns (index,
6206 convert_modes
6207 (mode, imode,
6208 expand_expr (node->high, NULL_RTX,
6209 VOIDmode, 0),
6210 unsignedp),
6211 LT, NULL_RTX, mode, unsignedp,
6212 label_rtx (node->left->code_label));
6213 emit_case_nodes (index, node->right, default_label, index_type);
6216 else
6218 /* Neither node is bounded. First distinguish the two sides;
6219 then emit the code for one side at a time. */
6221 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6223 /* See if the value is on the right. */
6224 emit_cmp_and_jump_insns (index,
6225 convert_modes
6226 (mode, imode,
6227 expand_expr (node->high, NULL_RTX,
6228 VOIDmode, 0),
6229 unsignedp),
6230 GT, NULL_RTX, mode, unsignedp,
6231 label_rtx (test_label));
6233 /* Value must be on the left.
6234 Handle the left-hand subtree. */
6235 emit_case_nodes (index, node->left, default_label, index_type);
6236 /* If left-hand subtree does nothing,
6237 go to default. */
6238 emit_jump_if_reachable (default_label);
6240 /* Code branches here for the right-hand subtree. */
6241 expand_label (test_label);
6242 emit_case_nodes (index, node->right, default_label, index_type);
6246 else if (node->right != 0 && node->left == 0)
6248 /* Here we have a right child but no left, so we issue a conditional
6249 branch to default and process the right child.
6251 Omit the conditional branch to default if it would guard only a
6252 single right child; it costs too much space to save so little time. */
6254 if (node->right->right || node->right->left
6255 || !tree_int_cst_equal (node->right->low, node->right->high))
6257 if (!node_has_low_bound (node, index_type))
6259 emit_cmp_and_jump_insns (index,
6260 convert_modes
6261 (mode, imode,
6262 expand_expr (node->high, NULL_RTX,
6263 VOIDmode, 0),
6264 unsignedp),
6265 LT, NULL_RTX, mode, unsignedp,
6266 default_label);
6269 emit_case_nodes (index, node->right, default_label, index_type);
6271 else
6272 /* We cannot process node->right normally
6273 since we haven't ruled out the numbers less than
6274 this node's value. So handle node->right explicitly. */
6275 do_jump_if_equal (index,
6276 convert_modes
6277 (mode, imode,
6278 expand_expr (node->right->low, NULL_RTX,
6279 VOIDmode, 0),
6280 unsignedp),
6281 label_rtx (node->right->code_label), unsignedp);
6284 else if (node->right == 0 && node->left != 0)
6286 /* Just one subtree, on the left. */
6287 if (node->left->left || node->left->right
6288 || !tree_int_cst_equal (node->left->low, node->left->high))
6290 if (!node_has_high_bound (node, index_type))
6292 emit_cmp_and_jump_insns (index,
6293 convert_modes
6294 (mode, imode,
6295 expand_expr (node->high, NULL_RTX,
6296 VOIDmode, 0),
6297 unsignedp),
6298 GT, NULL_RTX, mode, unsignedp,
6299 default_label);
6302 emit_case_nodes (index, node->left, default_label, index_type);
6304 else
6305 /* We cannot process node->left normally
6306 since we haven't ruled out the numbers greater than
6307 this node's value. So handle node->left explicitly. */
6308 do_jump_if_equal (index,
6309 convert_modes
6310 (mode, imode,
6311 expand_expr (node->left->low, NULL_RTX,
6312 VOIDmode, 0),
6313 unsignedp),
6314 label_rtx (node->left->code_label), unsignedp);
6317 else
6319 /* Node is a range. These cases are very similar to those for a single
6320 value, except that we do not start by testing whether this node
6321 is the one to branch to. */
6323 if (node->right != 0 && node->left != 0)
6325 /* Node has subtrees on both sides.
6326 If the right-hand subtree is bounded,
6327 test for it first, since we can go straight there.
6328 Otherwise, we need to make a branch in the control structure,
6329 then handle the two subtrees. */
6330 tree test_label = 0;
6332 if (node_is_bounded (node->right, index_type))
6333 /* Right-hand node is fully bounded so we can eliminate any
6334 testing and branch directly to the target code. */
6335 emit_cmp_and_jump_insns (index,
6336 convert_modes
6337 (mode, imode,
6338 expand_expr (node->high, NULL_RTX,
6339 VOIDmode, 0),
6340 unsignedp),
6341 GT, NULL_RTX, mode, unsignedp,
6342 label_rtx (node->right->code_label));
6343 else
6345 /* Right-hand node requires testing.
6346 Branch to a label where we will handle it later. */
6348 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6349 emit_cmp_and_jump_insns (index,
6350 convert_modes
6351 (mode, imode,
6352 expand_expr (node->high, NULL_RTX,
6353 VOIDmode, 0),
6354 unsignedp),
6355 GT, NULL_RTX, mode, unsignedp,
6356 label_rtx (test_label));
6359 /* Value belongs to this node or to the left-hand subtree. */
6361 emit_cmp_and_jump_insns (index,
6362 convert_modes
6363 (mode, imode,
6364 expand_expr (node->low, NULL_RTX,
6365 VOIDmode, 0),
6366 unsignedp),
6367 GE, NULL_RTX, mode, unsignedp,
6368 label_rtx (node->code_label));
6370 /* Handle the left-hand subtree. */
6371 emit_case_nodes (index, node->left, default_label, index_type);
6373 /* If right node had to be handled later, do that now. */
6375 if (test_label)
6377 /* If the left-hand subtree fell through,
6378 don't let it fall into the right-hand subtree. */
6379 emit_jump_if_reachable (default_label);
6381 expand_label (test_label);
6382 emit_case_nodes (index, node->right, default_label, index_type);
6386 else if (node->right != 0 && node->left == 0)
6388 /* Deal with values to the left of this node,
6389 if they are possible. */
6390 if (!node_has_low_bound (node, index_type))
6392 emit_cmp_and_jump_insns (index,
6393 convert_modes
6394 (mode, imode,
6395 expand_expr (node->low, NULL_RTX,
6396 VOIDmode, 0),
6397 unsignedp),
6398 LT, NULL_RTX, mode, unsignedp,
6399 default_label);
6402 /* Value belongs to this node or to the right-hand subtree. */
6404 emit_cmp_and_jump_insns (index,
6405 convert_modes
6406 (mode, imode,
6407 expand_expr (node->high, NULL_RTX,
6408 VOIDmode, 0),
6409 unsignedp),
6410 LE, NULL_RTX, mode, unsignedp,
6411 label_rtx (node->code_label));
6413 emit_case_nodes (index, node->right, default_label, index_type);
6416 else if (node->right == 0 && node->left != 0)
6418 /* Deal with values to the right of this node,
6419 if they are possible. */
6420 if (!node_has_high_bound (node, index_type))
6422 emit_cmp_and_jump_insns (index,
6423 convert_modes
6424 (mode, imode,
6425 expand_expr (node->high, NULL_RTX,
6426 VOIDmode, 0),
6427 unsignedp),
6428 GT, NULL_RTX, mode, unsignedp,
6429 default_label);
6432 /* Value belongs to this node or to the left-hand subtree. */
6434 emit_cmp_and_jump_insns (index,
6435 convert_modes
6436 (mode, imode,
6437 expand_expr (node->low, NULL_RTX,
6438 VOIDmode, 0),
6439 unsignedp),
6440 GE, NULL_RTX, mode, unsignedp,
6441 label_rtx (node->code_label));
6443 emit_case_nodes (index, node->left, default_label, index_type);
6446 else
6448 /* Node has no children so we check low and high bounds to remove
6449 redundant tests. Only one of the bounds can exist,
6450 since otherwise this node is bounded--a case tested already. */
6451 int high_bound = node_has_high_bound (node, index_type);
6452 int low_bound = node_has_low_bound (node, index_type);
6454 if (!high_bound && low_bound)
6456 emit_cmp_and_jump_insns (index,
6457 convert_modes
6458 (mode, imode,
6459 expand_expr (node->high, NULL_RTX,
6460 VOIDmode, 0),
6461 unsignedp),
6462 GT, NULL_RTX, mode, unsignedp,
6463 default_label);
6466 else if (!low_bound && high_bound)
6468 emit_cmp_and_jump_insns (index,
6469 convert_modes
6470 (mode, imode,
6471 expand_expr (node->low, NULL_RTX,
6472 VOIDmode, 0),
6473 unsignedp),
6474 LT, NULL_RTX, mode, unsignedp,
6475 default_label);
6477 else if (!low_bound && !high_bound)
6479 /* Widen LOW and HIGH to the same width as INDEX. */
6480 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
6481 tree low = build1 (CONVERT_EXPR, type, node->low);
6482 tree high = build1 (CONVERT_EXPR, type, node->high);
6483 rtx low_rtx, new_index, new_bound;
6485 /* Instead of doing two branches, emit one unsigned branch for
6486 (index-low) > (high-low). */
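             /* E.g. for a hypothetical `case 10 ... 20' this emits the
                single test (unsigned) (index - 10) > 10; an index of 7
                wraps around to a huge unsigned value and jumps to the
                default label, just as an index of 25 does.  */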
6487 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
6488 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
6489 NULL_RTX, unsignedp,
6490 OPTAB_WIDEN);
6491 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
6492 high, low)),
6493 NULL_RTX, mode, 0);
6495 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
6496 mode, 1, default_label);
6499 emit_jump (label_rtx (node->code_label));
6504 #include "gt-stmt.h"