* config/arm/elf.h (ASM_OUTPUT_ALIGNED_COMMON): Remove definition.
[official-gcc.git] / gcc / stmt.c
blob0fb7fb923b17d8b13a2fb7c71b30e290b72457e0
1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
41 #include "rtl.h"
42 #include "tree.h"
43 #include "tm_p.h"
44 #include "flags.h"
45 #include "except.h"
46 #include "function.h"
47 #include "insn-config.h"
48 #include "expr.h"
49 #include "libfuncs.h"
50 #include "hard-reg-set.h"
51 #include "loop.h"
52 #include "recog.h"
53 #include "machmode.h"
54 #include "toplev.h"
55 #include "output.h"
56 #include "ggc.h"
57 #include "langhooks.h"
58 #include "predict.h"
59 #include "optabs.h"
61 /* Assume that case vectors are not pc-relative. */
62 #ifndef CASE_VECTOR_PC_RELATIVE
63 #define CASE_VECTOR_PC_RELATIVE 0
64 #endif
66 /* Functions and data structures for expanding case statements. */
68 /* Case label structure, used to hold info on labels within case
69 statements. We handle "range" labels; for a single-value label
70 as in C, the high and low limits are the same.
72 An AVL tree of case nodes is initially created, and later transformed
73 to a list linked via the RIGHT fields in the nodes. Nodes with
74 higher case values are later in the list.
76 Switch statements can be output in one of two forms. A branch table
77 is used if there are more than a few labels and the labels are dense
78 within the range between the smallest and largest case value. If a
79 branch table is used, no further manipulations are done with the case
80 node chain.
82 The alternative to the use of a branch table is to generate a series
83 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
84 and PARENT fields to hold a binary tree. Initially the tree is
85 totally unbalanced, with everything on the right. We balance the tree
86 with nodes on the left having lower case values than the parent
87 and nodes on the right having higher values. We then output the tree
88 in order. */
90 struct case_node GTY(())
92 struct case_node *left; /* Left son in binary tree */
93 struct case_node *right; /* Right son in binary tree; also node chain */
94 struct case_node *parent; /* Parent of node in binary tree */
95 tree low; /* Lowest index value for this label */
96 tree high; /* Highest index value for this label */
97 tree code_label; /* Label to jump to when node matches */
98 int balance;
101 typedef struct case_node case_node;
102 typedef struct case_node *case_node_ptr;
104 /* These are used by estimate_case_costs and balance_case_nodes. */
106 /* This must be a signed type, and non-ANSI compilers lack signed char. */
107 static short cost_table_[129];
108 static int use_cost_table;
109 static int cost_table_initialized;
111 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW
112 is unsigned. */
113 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
115 /* Stack of control and binding constructs we are currently inside.
117 These constructs begin when you call `expand_start_WHATEVER'
118 and end when you call `expand_end_WHATEVER'. This stack records
119 info about how the construct began that tells the end-function
120 what to do. It also may provide information about the construct
121 to alter the behavior of other constructs within the body.
122 For example, they may affect the behavior of C `break' and `continue'.
124 Each construct gets one `struct nesting' object.
125 All of these objects are chained through the `all' field.
126 `nesting_stack' points to the first object (innermost construct).
127 The position of an entry on `nesting_stack' is in its `depth' field.
129 Each type of construct has its own individual stack.
130 For example, loops have `loop_stack'. Each object points to the
131 next object of the same type through the `next' field.
133 Some constructs are visible to `break' exit-statements and others
134 are not. Which constructs are visible depends on the language.
135 Therefore, the data structure allows each construct to be visible
136 or not, according to the args given when the construct is started.
137 The construct is visible if the `exit_label' field is non-null.
138 In that case, the value should be a CODE_LABEL rtx. */
140 struct nesting GTY(())
142 struct nesting *all;
143 struct nesting *next;
144 int depth;
145 rtx exit_label;
146 enum nesting_desc {
147 COND_NESTING,
148 LOOP_NESTING,
149 BLOCK_NESTING,
150 CASE_NESTING
151 } desc;
152 union nesting_u
154 /* For conds (if-then and if-then-else statements). */
155 struct nesting_cond
157 /* Label for the end of the if construct.
158 There is none if EXITFLAG was not set
159 and no `else' has been seen yet. */
160 rtx endif_label;
161 /* Label for the end of this alternative.
162 This may be the end of the if or the next else/elseif. */
163 rtx next_label;
164 } GTY ((tag ("COND_NESTING"))) cond;
165 /* For loops. */
166 struct nesting_loop
168 /* Label at the top of the loop; place to loop back to. */
169 rtx start_label;
170 /* Label at the end of the whole construct. */
171 rtx end_label;
172 /* Label for `continue' statement to jump to;
173 this is in front of the stepper of the loop. */
174 rtx continue_label;
175 } GTY ((tag ("LOOP_NESTING"))) loop;
176 /* For variable binding contours. */
177 struct nesting_block
179 /* Sequence number of this binding contour within the function,
180 in order of entry. */
181 int block_start_count;
182 /* Nonzero => value to restore stack to on exit. */
183 rtx stack_level;
184 /* The NOTE that starts this contour.
185 Used by expand_goto to check whether the destination
186 is within each contour or not. */
187 rtx first_insn;
188 /* Innermost containing binding contour that has a stack level. */
189 struct nesting *innermost_stack_block;
190 /* List of cleanups to be run on exit from this contour.
191 This is a list of expressions to be evaluated.
192 The TREE_PURPOSE of each link is the ..._DECL node
193 which the cleanup pertains to. */
194 tree cleanups;
195 /* List of cleanup-lists of blocks containing this block,
196 as they were at the locus where this block appears.
197 There is an element for each containing block,
198 ordered innermost containing block first.
199 The tail of this list can be 0,
200 if all remaining elements would be empty lists.
201 The element's TREE_VALUE is the cleanup-list of that block,
202 which may be null. */
203 tree outer_cleanups;
204 /* Chain of labels defined inside this binding contour.
205 For contours that have stack levels or cleanups. */
206 struct label_chain *label_chain;
207 /* Nonzero if this is associated with an EH region. */
208 int exception_region;
209 /* The saved target_temp_slot_level from our outer block.
210 We may reset target_temp_slot_level to be the level of
211 this block, if that is done, target_temp_slot_level
212 reverts to the saved target_temp_slot_level at the very
213 end of the block. */
214 int block_target_temp_slot_level;
215 /* True if we are currently emitting insns in an area of
216 output code that is controlled by a conditional
217 expression. This is used by the cleanup handling code to
218 generate conditional cleanup actions. */
219 int conditional_code;
220 /* A place to move the start of the exception region for any
221 of the conditional cleanups, must be at the end or after
222 the start of the last unconditional cleanup, and before any
223 conditional branch points. */
224 rtx last_unconditional_cleanup;
225 } GTY ((tag ("BLOCK_NESTING"))) block;
226 /* For switch (C) or case (Pascal) statements,
227 and also for dummies (see `expand_start_case_dummy'). */
228 struct nesting_case
230 /* The insn after which the case dispatch should finally
231 be emitted. Zero for a dummy. */
232 rtx start;
233 /* A list of case labels; it is first built as an AVL tree.
234 During expand_end_case, this is converted to a list, and may be
235 rearranged into a nearly balanced binary tree. */
236 struct case_node *case_list;
237 /* Label to jump to if no case matches. */
238 tree default_label;
239 /* The expression to be dispatched on. */
240 tree index_expr;
241 /* Type that INDEX_EXPR should be converted to. */
242 tree nominal_type;
243 /* Name of this kind of statement, for warnings. */
244 const char *printname;
245 /* Used to save no_line_numbers till we see the first case label.
246 We set this to -1 when we see the first case label in this
247 case statement. */
248 int line_number_status;
249 } GTY ((tag ("CASE_NESTING"))) case_stmt;
250 } GTY ((desc ("%1.desc"))) data;
253 /* Allocate and return a new `struct nesting'. */
255 #define ALLOC_NESTING() \
256 (struct nesting *) ggc_alloc (sizeof (struct nesting))
258 /* Pop the nesting stack element by element until we pop off
259 the element which is at the top of STACK.
260 Update all the other stacks, popping off elements from them
261 as we pop them from nesting_stack. */
263 #define POPSTACK(STACK) \
264 do { struct nesting *target = STACK; \
265 struct nesting *this; \
266 do { this = nesting_stack; \
267 if (loop_stack == this) \
268 loop_stack = loop_stack->next; \
269 if (cond_stack == this) \
270 cond_stack = cond_stack->next; \
271 if (block_stack == this) \
272 block_stack = block_stack->next; \
273 if (stack_block_stack == this) \
274 stack_block_stack = stack_block_stack->next; \
275 if (case_stack == this) \
276 case_stack = case_stack->next; \
277 nesting_depth = nesting_stack->depth - 1; \
278 nesting_stack = this->all; } \
279 while (this != target); } while (0)
281 /* In some cases it is impossible to generate code for a forward goto
282 until the label definition is seen. This happens when it may be necessary
283 for the goto to reset the stack pointer: we don't yet know how to do that.
284 So expand_goto puts an entry on this fixup list.
285 Each time a binding contour that resets the stack is exited,
286 we check each fixup.
287 If the target label has now been defined, we can insert the proper code. */
289 struct goto_fixup GTY(())
291 /* Points to following fixup. */
292 struct goto_fixup *next;
293 /* Points to the insn before the jump insn.
294 If more code must be inserted, it goes after this insn. */
295 rtx before_jump;
296 /* The LABEL_DECL that this jump is jumping to, or 0
297 for break, continue or return. */
298 tree target;
299 /* The BLOCK for the place where this goto was found. */
300 tree context;
301 /* The CODE_LABEL rtx that this is jumping to. */
302 rtx target_rtl;
303 /* Number of binding contours started in current function
304 before the label reference. */
305 int block_start_count;
306 /* The outermost stack level that should be restored for this jump.
307 Each time a binding contour that resets the stack is exited,
308 if the target label is *not* yet defined, this slot is updated. */
309 rtx stack_level;
310 /* List of lists of cleanup expressions to be run by this goto.
311 There is one element for each block that this goto is within.
312 The tail of this list can be 0,
313 if all remaining elements would be empty.
314 The TREE_VALUE contains the cleanup list of that block as of the
315 time this goto was seen.
316 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
317 tree cleanup_list_list;
320 /* Within any binding contour that must restore a stack level,
321 all labels are recorded with a chain of these structures. */
323 struct label_chain GTY(())
325 /* Points to following fixup. */
326 struct label_chain *next;
327 tree label;
330 struct stmt_status GTY(())
332 /* Chain of all pending binding contours. */
333 struct nesting * x_block_stack;
335 /* If any new stacks are added here, add them to POPSTACKS too. */
337 /* Chain of all pending binding contours that restore stack levels
338 or have cleanups. */
339 struct nesting * x_stack_block_stack;
341 /* Chain of all pending conditional statements. */
342 struct nesting * x_cond_stack;
344 /* Chain of all pending loops. */
345 struct nesting * x_loop_stack;
347 /* Chain of all pending case or switch statements. */
348 struct nesting * x_case_stack;
350 /* Separate chain including all of the above,
351 chained through the `all' field. */
352 struct nesting * x_nesting_stack;
354 /* Number of entries on nesting_stack now. */
355 int x_nesting_depth;
357 /* Number of binding contours started so far in this function. */
358 int x_block_start_count;
360 /* Each time we expand an expression-statement,
361 record the expr's type and its RTL value here. */
362 tree x_last_expr_type;
363 rtx x_last_expr_value;
365 /* Nonzero if within a ({...}) grouping, in which case we must
366 always compute a value for each expr-stmt in case it is the last one. */
367 int x_expr_stmts_for_value;
369 /* Filename and line number of last line-number note,
370 whether we actually emitted it or not. */
371 const char *x_emit_filename;
372 int x_emit_lineno;
374 struct goto_fixup *x_goto_fixup_chain;
377 #define block_stack (cfun->stmt->x_block_stack)
378 #define stack_block_stack (cfun->stmt->x_stack_block_stack)
379 #define cond_stack (cfun->stmt->x_cond_stack)
380 #define loop_stack (cfun->stmt->x_loop_stack)
381 #define case_stack (cfun->stmt->x_case_stack)
382 #define nesting_stack (cfun->stmt->x_nesting_stack)
383 #define nesting_depth (cfun->stmt->x_nesting_depth)
384 #define current_block_start_count (cfun->stmt->x_block_start_count)
385 #define last_expr_type (cfun->stmt->x_last_expr_type)
386 #define last_expr_value (cfun->stmt->x_last_expr_value)
387 #define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
388 #define emit_filename (cfun->stmt->x_emit_filename)
389 #define emit_lineno (cfun->stmt->x_emit_lineno)
390 #define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
392 /* Nonzero if we are using EH to handle cleanups. */
393 static int using_eh_for_cleanups_p = 0;
395 static int n_occurrences PARAMS ((int, const char *));
396 static bool parse_input_constraint PARAMS ((const char **, int, int, int,
397 int, const char * const *,
398 bool *, bool *));
399 static bool decl_conflicts_with_clobbers_p PARAMS ((tree, const HARD_REG_SET));
400 static void expand_goto_internal PARAMS ((tree, rtx, rtx));
401 static int expand_fixup PARAMS ((tree, rtx, rtx));
402 static rtx expand_nl_handler_label PARAMS ((rtx, rtx));
403 static void expand_nl_goto_receiver PARAMS ((void));
404 static void expand_nl_goto_receivers PARAMS ((struct nesting *));
405 static void fixup_gotos PARAMS ((struct nesting *, rtx, tree,
406 rtx, int));
407 static bool check_operand_nalternatives PARAMS ((tree, tree));
408 static bool check_unique_operand_names PARAMS ((tree, tree));
409 static tree resolve_operand_names PARAMS ((tree, tree, tree,
410 const char **));
411 static char *resolve_operand_name_1 PARAMS ((char *, tree, tree));
412 static void expand_null_return_1 PARAMS ((rtx));
413 static enum br_predictor return_prediction PARAMS ((rtx));
414 static void expand_value_return PARAMS ((rtx));
415 static int tail_recursion_args PARAMS ((tree, tree));
416 static void expand_cleanups PARAMS ((tree, int, int));
417 static void check_seenlabel PARAMS ((void));
418 static void do_jump_if_equal PARAMS ((rtx, rtx, rtx, int));
419 static int estimate_case_costs PARAMS ((case_node_ptr));
420 static bool same_case_target_p PARAMS ((rtx, rtx));
421 static void strip_default_case_nodes PARAMS ((case_node_ptr *, rtx));
422 static bool lshift_cheap_p PARAMS ((void));
423 static int case_bit_test_cmp PARAMS ((const void *, const void *));
424 static void emit_case_bit_tests PARAMS ((tree, tree, tree, tree,
425 case_node_ptr, rtx));
426 static void group_case_nodes PARAMS ((case_node_ptr));
427 static void balance_case_nodes PARAMS ((case_node_ptr *,
428 case_node_ptr));
429 static int node_has_low_bound PARAMS ((case_node_ptr, tree));
430 static int node_has_high_bound PARAMS ((case_node_ptr, tree));
431 static int node_is_bounded PARAMS ((case_node_ptr, tree));
432 static void emit_jump_if_reachable PARAMS ((rtx));
433 static void emit_case_nodes PARAMS ((rtx, case_node_ptr, rtx, tree));
434 static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
436 void
437 using_eh_for_cleanups ()
439 using_eh_for_cleanups_p = 1;
442 void
443 init_stmt_for_function ()
445 cfun->stmt = ((struct stmt_status *)ggc_alloc (sizeof (struct stmt_status)));
447 /* We are not currently within any block, conditional, loop or case. */
448 block_stack = 0;
449 stack_block_stack = 0;
450 loop_stack = 0;
451 case_stack = 0;
452 cond_stack = 0;
453 nesting_stack = 0;
454 nesting_depth = 0;
456 current_block_start_count = 0;
458 /* No gotos have been expanded yet. */
459 goto_fixup_chain = 0;
461 /* We are not processing a ({...}) grouping. */
462 expr_stmts_for_value = 0;
463 clear_last_expr ();
466 /* Record the current file and line. Called from emit_line_note. */
467 void
468 set_file_and_line_for_stmt (file, line)
469 const char *file;
470 int line;
472 /* If we're outputting an inline function, and we add a line note,
473 there may be no CFUN->STMT information. So, there's no need to
474 update it. */
475 if (cfun->stmt)
477 emit_filename = file;
478 emit_lineno = line;
482 /* Emit a no-op instruction. */
484 void
485 emit_nop ()
487 rtx last_insn;
489 last_insn = get_last_insn ();
490 if (!optimize
491 && (GET_CODE (last_insn) == CODE_LABEL
492 || (GET_CODE (last_insn) == NOTE
493 && prev_real_insn (last_insn) == 0)))
494 emit_insn (gen_nop ());
497 /* Return the rtx-label that corresponds to a LABEL_DECL,
498 creating it if necessary. */
501 label_rtx (label)
502 tree label;
504 if (TREE_CODE (label) != LABEL_DECL)
505 abort ();
507 if (!DECL_RTL_SET_P (label))
508 SET_DECL_RTL (label, gen_label_rtx ());
510 return DECL_RTL (label);
514 /* Add an unconditional jump to LABEL as the next sequential instruction. */
516 void
517 emit_jump (label)
518 rtx label;
520 do_pending_stack_adjust ();
521 emit_jump_insn (gen_jump (label));
522 emit_barrier ();
525 /* Emit code to jump to the address
526 specified by the pointer expression EXP. */
528 void
529 expand_computed_goto (exp)
530 tree exp;
532 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
534 #ifdef POINTERS_EXTEND_UNSIGNED
535 if (GET_MODE (x) != Pmode)
536 x = convert_memory_address (Pmode, x);
537 #endif
539 emit_queue ();
541 if (! cfun->computed_goto_common_label)
543 cfun->computed_goto_common_reg = copy_to_mode_reg (Pmode, x);
544 cfun->computed_goto_common_label = gen_label_rtx ();
545 emit_label (cfun->computed_goto_common_label);
547 do_pending_stack_adjust ();
548 emit_indirect_jump (cfun->computed_goto_common_reg);
550 current_function_has_computed_jump = 1;
552 else
554 emit_move_insn (cfun->computed_goto_common_reg, x);
555 emit_jump (cfun->computed_goto_common_label);
559 /* Handle goto statements and the labels that they can go to. */
561 /* Specify the location in the RTL code of a label LABEL,
562 which is a LABEL_DECL tree node.
564 This is used for the kind of label that the user can jump to with a
565 goto statement, and for alternatives of a switch or case statement.
566 RTL labels generated for loops and conditionals don't go through here;
567 they are generated directly at the RTL level, by other functions below.
569 Note that this has nothing to do with defining label *names*.
570 Languages vary in how they do that and what that even means. */
572 void
573 expand_label (label)
574 tree label;
576 struct label_chain *p;
578 do_pending_stack_adjust ();
579 emit_label (label_rtx (label));
580 if (DECL_NAME (label))
581 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
583 if (stack_block_stack != 0)
585 p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
586 p->next = stack_block_stack->data.block.label_chain;
587 stack_block_stack->data.block.label_chain = p;
588 p->label = label;
592 /* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
593 from nested functions. */
595 void
596 declare_nonlocal_label (label)
597 tree label;
599 rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
601 nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
602 LABEL_PRESERVE_P (label_rtx (label)) = 1;
603 if (nonlocal_goto_handler_slots == 0)
605 emit_stack_save (SAVE_NONLOCAL,
606 &nonlocal_goto_stack_level,
607 PREV_INSN (tail_recursion_reentry));
609 nonlocal_goto_handler_slots
610 = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
613 /* Generate RTL code for a `goto' statement with target label LABEL.
614 LABEL should be a LABEL_DECL tree node that was or will later be
615 defined with `expand_label'. */
617 void
618 expand_goto (label)
619 tree label;
621 tree context;
623 /* Check for a nonlocal goto to a containing function. */
624 context = decl_function_context (label);
625 if (context != 0 && context != current_function_decl)
627 struct function *p = find_function_data (context);
628 rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
629 rtx handler_slot, static_chain, save_area, insn;
630 tree link;
632 /* Find the corresponding handler slot for this label. */
633 handler_slot = p->x_nonlocal_goto_handler_slots;
634 for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
635 link = TREE_CHAIN (link))
636 handler_slot = XEXP (handler_slot, 1);
637 handler_slot = XEXP (handler_slot, 0);
639 p->has_nonlocal_label = 1;
640 current_function_has_nonlocal_goto = 1;
641 LABEL_REF_NONLOCAL_P (label_ref) = 1;
643 /* Copy the rtl for the slots so that they won't be shared in
644 case the virtual stack vars register gets instantiated differently
645 in the parent than in the child. */
647 static_chain = copy_to_reg (lookup_static_chain (label));
649 /* Get addr of containing function's current nonlocal goto handler,
650 which will do any cleanups and then jump to the label. */
651 handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
652 virtual_stack_vars_rtx,
653 static_chain));
655 /* Get addr of containing function's nonlocal save area. */
656 save_area = p->x_nonlocal_goto_stack_level;
657 if (save_area)
658 save_area = replace_rtx (copy_rtx (save_area),
659 virtual_stack_vars_rtx, static_chain);
661 #if HAVE_nonlocal_goto
662 if (HAVE_nonlocal_goto)
663 emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
664 save_area, label_ref));
665 else
666 #endif
668 /* Restore frame pointer for containing function.
669 This sets the actual hard register used for the frame pointer
670 to the location of the function's incoming static chain info.
671 The non-local goto handler will then adjust it to contain the
672 proper value and reload the argument pointer, if needed. */
673 emit_move_insn (hard_frame_pointer_rtx, static_chain);
674 emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);
676 /* USE of hard_frame_pointer_rtx added for consistency;
677 not clear if really needed. */
678 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
679 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
680 emit_indirect_jump (handler_slot);
683 /* Search backwards to the jump insn and mark it as a
684 non-local goto. */
685 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
687 if (GET_CODE (insn) == JUMP_INSN)
689 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
690 const0_rtx, REG_NOTES (insn));
691 break;
693 else if (GET_CODE (insn) == CALL_INSN)
694 break;
697 else
698 expand_goto_internal (label, label_rtx (label), NULL_RTX);
701 /* Generate RTL code for a `goto' statement with target label BODY.
702 LABEL should be a LABEL_REF.
703 LAST_INSN, if non-0, is the rtx we should consider as the last
704 insn emitted (for the purposes of cleaning up a return). */
706 static void
707 expand_goto_internal (body, label, last_insn)
708 tree body;
709 rtx label;
710 rtx last_insn;
712 struct nesting *block;
713 rtx stack_level = 0;
715 if (GET_CODE (label) != CODE_LABEL)
716 abort ();
718 /* If label has already been defined, we can tell now
719 whether and how we must alter the stack level. */
721 if (PREV_INSN (label) != 0)
723 /* Find the innermost pending block that contains the label.
724 (Check containment by comparing insn-uids.)
725 Then restore the outermost stack level within that block,
726 and do cleanups of all blocks contained in it. */
727 for (block = block_stack; block; block = block->next)
729 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
730 break;
731 if (block->data.block.stack_level != 0)
732 stack_level = block->data.block.stack_level;
733 /* Execute the cleanups for blocks we are exiting. */
734 if (block->data.block.cleanups != 0)
736 expand_cleanups (block->data.block.cleanups, 1, 1);
737 do_pending_stack_adjust ();
741 if (stack_level)
743 /* Ensure stack adjust isn't done by emit_jump, as this
744 would clobber the stack pointer. This one should be
745 deleted as dead by flow. */
746 clear_pending_stack_adjust ();
747 do_pending_stack_adjust ();
749 /* Don't do this adjust if it's to the end label and this function
750 is to return with a depressed stack pointer. */
751 if (label == return_label
752 && (((TREE_CODE (TREE_TYPE (current_function_decl))
753 == FUNCTION_TYPE)
754 && (TYPE_RETURNS_STACK_DEPRESSED
755 (TREE_TYPE (current_function_decl))))))
757 else
758 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
761 if (body != 0 && DECL_TOO_LATE (body))
762 error ("jump to `%s' invalidly jumps into binding contour",
763 IDENTIFIER_POINTER (DECL_NAME (body)));
765 /* Label not yet defined: may need to put this goto
766 on the fixup list. */
767 else if (! expand_fixup (body, label, last_insn))
769 /* No fixup needed. Record that the label is the target
770 of at least one goto that has no fixup. */
771 if (body != 0)
772 TREE_ADDRESSABLE (body) = 1;
775 emit_jump (label);
778 /* Generate if necessary a fixup for a goto
779 whose target label in tree structure (if any) is TREE_LABEL
780 and whose target in rtl is RTL_LABEL.
782 If LAST_INSN is nonzero, we pretend that the jump appears
783 after insn LAST_INSN instead of at the current point in the insn stream.
785 The fixup will be used later to insert insns just before the goto.
786 Those insns will restore the stack level as appropriate for the
787 target label, and will (in the case of C++) also invoke any object
788 destructors which have to be invoked when we exit the scopes which
789 are exited by the goto.
791 Value is nonzero if a fixup is made. */
793 static int
794 expand_fixup (tree_label, rtl_label, last_insn)
795 tree tree_label;
796 rtx rtl_label;
797 rtx last_insn;
799 struct nesting *block, *end_block;
801 /* See if we can recognize which block the label will be output in.
802 This is possible in some very common cases.
803 If we succeed, set END_BLOCK to that block.
804 Otherwise, set it to 0. */
806 if (cond_stack
807 && (rtl_label == cond_stack->data.cond.endif_label
808 || rtl_label == cond_stack->data.cond.next_label))
809 end_block = cond_stack;
810 /* If we are in a loop, recognize certain labels which
811 are likely targets. This reduces the number of fixups
812 we need to create. */
813 else if (loop_stack
814 && (rtl_label == loop_stack->data.loop.start_label
815 || rtl_label == loop_stack->data.loop.end_label
816 || rtl_label == loop_stack->data.loop.continue_label))
817 end_block = loop_stack;
818 else
819 end_block = 0;
821 /* Now set END_BLOCK to the binding level to which we will return. */
823 if (end_block)
825 struct nesting *next_block = end_block->all;
826 block = block_stack;
828 /* First see if the END_BLOCK is inside the innermost binding level.
829 If so, then no cleanups or stack levels are relevant. */
830 while (next_block && next_block != block)
831 next_block = next_block->all;
833 if (next_block)
834 return 0;
836 /* Otherwise, set END_BLOCK to the innermost binding level
837 which is outside the relevant control-structure nesting. */
838 next_block = block_stack->next;
839 for (block = block_stack; block != end_block; block = block->all)
840 if (block == next_block)
841 next_block = next_block->next;
842 end_block = next_block;
845 /* Does any containing block have a stack level or cleanups?
846 If not, no fixup is needed, and that is the normal case
847 (the only case, for standard C). */
848 for (block = block_stack; block != end_block; block = block->next)
849 if (block->data.block.stack_level != 0
850 || block->data.block.cleanups != 0)
851 break;
853 if (block != end_block)
855 /* Ok, a fixup is needed. Add a fixup to the list of such. */
856 struct goto_fixup *fixup
857 = (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
858 /* In case an old stack level is restored, make sure that comes
859 after any pending stack adjust. */
860 /* ?? If the fixup isn't to come at the present position,
861 doing the stack adjust here isn't useful. Doing it with our
862 settings at that location isn't useful either. Let's hope
863 someone does it! */
864 if (last_insn == 0)
865 do_pending_stack_adjust ();
866 fixup->target = tree_label;
867 fixup->target_rtl = rtl_label;
869 /* Create a BLOCK node and a corresponding matched set of
870 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
871 this point. The notes will encapsulate any and all fixup
872 code which we might later insert at this point in the insn
873 stream. Also, the BLOCK node will be the parent (i.e. the
874 `SUPERBLOCK') of any other BLOCK nodes which we might create
875 later on when we are expanding the fixup code.
877 Note that optimization passes (including expand_end_loop)
878 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
879 as a placeholder. */
882 rtx original_before_jump
883 = last_insn ? last_insn : get_last_insn ();
884 rtx start;
885 rtx end;
886 tree block;
888 block = make_node (BLOCK);
889 TREE_USED (block) = 1;
891 if (!cfun->x_whole_function_mode_p)
892 (*lang_hooks.decls.insert_block) (block);
893 else
895 BLOCK_CHAIN (block)
896 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
897 BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
898 = block;
901 start_sequence ();
902 start = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
903 if (cfun->x_whole_function_mode_p)
904 NOTE_BLOCK (start) = block;
905 fixup->before_jump = emit_note (NULL, NOTE_INSN_DELETED);
906 end = emit_note (NULL, NOTE_INSN_BLOCK_END);
907 if (cfun->x_whole_function_mode_p)
908 NOTE_BLOCK (end) = block;
909 fixup->context = block;
910 end_sequence ();
911 emit_insn_after (start, original_before_jump);
914 fixup->block_start_count = current_block_start_count;
915 fixup->stack_level = 0;
916 fixup->cleanup_list_list
917 = ((block->data.block.outer_cleanups
918 || block->data.block.cleanups)
919 ? tree_cons (NULL_TREE, block->data.block.cleanups,
920 block->data.block.outer_cleanups)
921 : 0);
922 fixup->next = goto_fixup_chain;
923 goto_fixup_chain = fixup;
926 return block != 0;
929 /* Expand any needed fixups in the outputmost binding level of the
930 function. FIRST_INSN is the first insn in the function. */
932 void
933 expand_fixups (first_insn)
934 rtx first_insn;
936 fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
939 /* When exiting a binding contour, process all pending gotos requiring fixups.
940 THISBLOCK is the structure that describes the block being exited.
941 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
942 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
943 FIRST_INSN is the insn that began this contour.
945 Gotos that jump out of this contour must restore the
946 stack level and do the cleanups before actually jumping.
948 DONT_JUMP_IN positive means report error if there is a jump into this
949 contour from before the beginning of the contour. This is also done if
950 STACK_LEVEL is nonzero unless DONT_JUMP_IN is negative. */
952 static void
953 fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
954 struct nesting *thisblock;
955 rtx stack_level;
956 tree cleanup_list;
957 rtx first_insn;
958 int dont_jump_in;
960 struct goto_fixup *f, *prev;
962 /* F is the fixup we are considering; PREV is the previous one. */
963 /* We run this loop in two passes so that cleanups of exited blocks
964 are run first, and blocks that are exited are marked so
965 afterwards. */
967 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
969 /* Test for a fixup that is inactive because it is already handled. */
970 if (f->before_jump == 0)
972 /* Delete inactive fixup from the chain, if that is easy to do. */
973 if (prev != 0)
974 prev->next = f->next;
976 /* Has this fixup's target label been defined?
977 If so, we can finalize it. */
978 else if (PREV_INSN (f->target_rtl) != 0)
980 rtx cleanup_insns;
982 /* If this fixup jumped into this contour from before the beginning
983 of this contour, report an error. This code used to use
984 the first non-label insn after f->target_rtl, but that's
985 wrong since such can be added, by things like put_var_into_stack
986 and have INSN_UIDs that are out of the range of the block. */
987 /* ??? Bug: this does not detect jumping in through intermediate
988 blocks that have stack levels or cleanups.
989 It detects only a problem with the innermost block
990 around the label. */
991 if (f->target != 0
992 && (dont_jump_in > 0 || (dont_jump_in == 0 && stack_level)
993 || cleanup_list)
994 && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
995 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
996 && ! DECL_ERROR_ISSUED (f->target))
998 error_with_decl (f->target,
999 "label `%s' used before containing binding contour");
1000 /* Prevent multiple errors for one label. */
1001 DECL_ERROR_ISSUED (f->target) = 1;
1004 /* We will expand the cleanups into a sequence of their own and
1005 then later on we will attach this new sequence to the insn
1006 stream just ahead of the actual jump insn. */
1008 start_sequence ();
1010 /* Temporarily restore the lexical context where we will
1011 logically be inserting the fixup code. We do this for the
1012 sake of getting the debugging information right. */
1014 (*lang_hooks.decls.pushlevel) (0);
1015 (*lang_hooks.decls.set_block) (f->context);
1017 /* Expand the cleanups for blocks this jump exits. */
1018 if (f->cleanup_list_list)
1020 tree lists;
1021 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
1022 /* Marked elements correspond to blocks that have been closed.
1023 Do their cleanups. */
1024 if (TREE_ADDRESSABLE (lists)
1025 && TREE_VALUE (lists) != 0)
1027 expand_cleanups (TREE_VALUE (lists), 1, 1);
1028 /* Pop any pushes done in the cleanups,
1029 in case function is about to return. */
1030 do_pending_stack_adjust ();
1034 /* Restore stack level for the biggest contour that this
1035 jump jumps out of. */
1036 if (f->stack_level
1037 && ! (f->target_rtl == return_label
1038 && ((TREE_CODE (TREE_TYPE (current_function_decl))
1039 == FUNCTION_TYPE)
1040 && (TYPE_RETURNS_STACK_DEPRESSED
1041 (TREE_TYPE (current_function_decl))))))
1042 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
1044 /* Finish up the sequence containing the insns which implement the
1045 necessary cleanups, and then attach that whole sequence to the
1046 insn stream just ahead of the actual jump insn. Attaching it
1047 at that point insures that any cleanups which are in fact
1048 implicit C++ object destructions (which must be executed upon
1049 leaving the block) appear (to the debugger) to be taking place
1050 in an area of the generated code where the object(s) being
1051 destructed are still "in scope". */
1053 cleanup_insns = get_insns ();
1054 (*lang_hooks.decls.poplevel) (1, 0, 0);
1056 end_sequence ();
1057 emit_insn_after (cleanup_insns, f->before_jump);
1059 f->before_jump = 0;
1063 /* For any still-undefined labels, do the cleanups for this block now.
1064 We must do this now since items in the cleanup list may go out
1065 of scope when the block ends. */
1066 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1067 if (f->before_jump != 0
1068 && PREV_INSN (f->target_rtl) == 0
1069 /* Label has still not appeared. If we are exiting a block with
1070 a stack level to restore, that started before the fixup,
1071 mark this stack level as needing restoration
1072 when the fixup is later finalized. */
1073 && thisblock != 0
1074 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
1075 means the label is undefined. That's erroneous, but possible. */
1076 && (thisblock->data.block.block_start_count
1077 <= f->block_start_count))
1079 tree lists = f->cleanup_list_list;
1080 rtx cleanup_insns;
1082 for (; lists; lists = TREE_CHAIN (lists))
1083 /* If the following elt. corresponds to our containing block
1084 then the elt. must be for this block. */
1085 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
1087 start_sequence ();
1088 (*lang_hooks.decls.pushlevel) (0);
1089 (*lang_hooks.decls.set_block) (f->context);
1090 expand_cleanups (TREE_VALUE (lists), 1, 1);
1091 do_pending_stack_adjust ();
1092 cleanup_insns = get_insns ();
1093 (*lang_hooks.decls.poplevel) (1, 0, 0);
1094 end_sequence ();
1095 if (cleanup_insns != 0)
1096 f->before_jump
1097 = emit_insn_after (cleanup_insns, f->before_jump);
1099 f->cleanup_list_list = TREE_CHAIN (lists);
1102 if (stack_level)
1103 f->stack_level = stack_level;
1107 /* Return the number of times character C occurs in string S. */
1108 static int
1109 n_occurrences (c, s)
1110 int c;
1111 const char *s;
1113 int n = 0;
1114 while (*s)
1115 n += (*s++ == c);
1116 return n;
1119 /* Generate RTL for an asm statement (explicit assembler code).
1120 STRING is a STRING_CST node containing the assembler code text,
1121 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
1122 insn is volatile; don't optimize it. */
1124 void
1125 expand_asm (string, vol)
1126 tree string;
1127 int vol;
1129 rtx body;
1131 if (TREE_CODE (string) == ADDR_EXPR)
1132 string = TREE_OPERAND (string, 0);
1134 body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));
1136 MEM_VOLATILE_P (body) = vol;
1138 emit_insn (body);
1140 clear_last_expr ();
1143 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
1144 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
1145 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
1146 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
1147 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
1148 constraint allows the use of a register operand. And, *IS_INOUT
1149 will be true if the operand is read-write, i.e., if it is used as
1150 an input as well as an output. If *CONSTRAINT_P is not in
1151 canonical form, it will be made canonical. (Note that `+' will be
1152 replaced with `=' as part of this process.)
1154 Returns TRUE if all went well; FALSE if an error occurred. */
1156 bool
1157 parse_output_constraint (constraint_p, operand_num, ninputs, noutputs,
1158 allows_mem, allows_reg, is_inout)
1159 const char **constraint_p;
1160 int operand_num;
1161 int ninputs;
1162 int noutputs;
1163 bool *allows_mem;
1164 bool *allows_reg;
1165 bool *is_inout;
1167 const char *constraint = *constraint_p;
1168 const char *p;
1170 /* Assume the constraint doesn't allow the use of either a register
1171 or memory. */
1172 *allows_mem = false;
1173 *allows_reg = false;
1175 /* Allow the `=' or `+' to not be at the beginning of the string,
1176 since it wasn't explicitly documented that way, and there is a
1177 large body of code that puts it last. Swap the character to
1178 the front, so as not to uglify any place else. */
1179 p = strchr (constraint, '=');
1180 if (!p)
1181 p = strchr (constraint, '+');
1183 /* If the string doesn't contain an `=', issue an error
1184 message. */
1185 if (!p)
1187 error ("output operand constraint lacks `='");
1188 return false;
1191 /* If the constraint begins with `+', then the operand is both read
1192 from and written to. */
1193 *is_inout = (*p == '+');
1195 /* Canonicalize the output constraint so that it begins with `='. */
1196 if (p != constraint || is_inout)
1198 char *buf;
1199 size_t c_len = strlen (constraint);
1201 if (p != constraint)
1202 warning ("output constraint `%c' for operand %d is not at the beginning",
1203 *p, operand_num);
1205 /* Make a copy of the constraint. */
1206 buf = alloca (c_len + 1);
1207 strcpy (buf, constraint);
1208 /* Swap the first character and the `=' or `+'. */
1209 buf[p - constraint] = buf[0];
1210 /* Make sure the first character is an `='. (Until we do this,
1211 it might be a `+'.) */
1212 buf[0] = '=';
1213 /* Replace the constraint with the canonicalized string. */
1214 *constraint_p = ggc_alloc_string (buf, c_len);
1215 constraint = *constraint_p;
1218 /* Loop through the constraint string. */
1219 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
1220 switch (*p)
1222 case '+':
1223 case '=':
1224 error ("operand constraint contains incorrectly positioned '+' or '='");
1225 return false;
1227 case '%':
1228 if (operand_num + 1 == ninputs + noutputs)
1230 error ("`%%' constraint used with last operand");
1231 return false;
1233 break;
1235 case 'V': case 'm': case 'o':
1236 *allows_mem = true;
1237 break;
1239 case '?': case '!': case '*': case '&': case '#':
1240 case 'E': case 'F': case 'G': case 'H':
1241 case 's': case 'i': case 'n':
1242 case 'I': case 'J': case 'K': case 'L': case 'M':
1243 case 'N': case 'O': case 'P': case ',':
1244 break;
1246 case '0': case '1': case '2': case '3': case '4':
1247 case '5': case '6': case '7': case '8': case '9':
1248 case '[':
1249 error ("matching constraint not valid in output operand");
1250 return false;
1252 case '<': case '>':
1253 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1254 excepting those that expand_call created. So match memory
1255 and hope. */
1256 *allows_mem = true;
1257 break;
1259 case 'g': case 'X':
1260 *allows_reg = true;
1261 *allows_mem = true;
1262 break;
1264 case 'p': case 'r':
1265 *allows_reg = true;
1266 break;
1268 default:
1269 if (!ISALPHA (*p))
1270 break;
1271 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
1272 *allows_reg = true;
1273 #ifdef EXTRA_CONSTRAINT_STR
1274 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
1275 *allows_reg = true;
1276 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
1277 *allows_mem = true;
1278 else
1280 /* Otherwise we can't assume anything about the nature of
1281 the constraint except that it isn't purely registers.
1282 Treat it like "g" and hope for the best. */
1283 *allows_reg = true;
1284 *allows_mem = true;
1286 #endif
1287 break;
1290 return true;
1293 /* Similar, but for input constraints. */
1295 static bool
1296 parse_input_constraint (constraint_p, input_num, ninputs, noutputs, ninout,
1297 constraints, allows_mem, allows_reg)
1298 const char **constraint_p;
1299 int input_num;
1300 int ninputs;
1301 int noutputs;
1302 int ninout;
1303 const char * const * constraints;
1304 bool *allows_mem;
1305 bool *allows_reg;
1307 const char *constraint = *constraint_p;
1308 const char *orig_constraint = constraint;
1309 size_t c_len = strlen (constraint);
1310 size_t j;
1312 /* Assume the constraint doesn't allow the use of either
1313 a register or memory. */
1314 *allows_mem = false;
1315 *allows_reg = false;
1317 /* Make sure constraint has neither `=', `+', nor '&'. */
1319 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
1320 switch (constraint[j])
1322 case '+': case '=': case '&':
1323 if (constraint == orig_constraint)
1325 error ("input operand constraint contains `%c'", constraint[j]);
1326 return false;
1328 break;
1330 case '%':
1331 if (constraint == orig_constraint
1332 && input_num + 1 == ninputs - ninout)
1334 error ("`%%' constraint used with last operand");
1335 return false;
1337 break;
1339 case 'V': case 'm': case 'o':
1340 *allows_mem = true;
1341 break;
1343 case '<': case '>':
1344 case '?': case '!': case '*': case '#':
1345 case 'E': case 'F': case 'G': case 'H':
1346 case 's': case 'i': case 'n':
1347 case 'I': case 'J': case 'K': case 'L': case 'M':
1348 case 'N': case 'O': case 'P': case ',':
1349 break;
1351 /* Whether or not a numeric constraint allows a register is
1352 decided by the matching constraint, and so there is no need
1353 to do anything special with them. We must handle them in
1354 the default case, so that we don't unnecessarily force
1355 operands to memory. */
1356 case '0': case '1': case '2': case '3': case '4':
1357 case '5': case '6': case '7': case '8': case '9':
1359 char *end;
1360 unsigned long match;
1362 match = strtoul (constraint + j, &end, 10);
1363 if (match >= (unsigned long) noutputs)
1365 error ("matching constraint references invalid operand number");
1366 return false;
1369 /* Try and find the real constraint for this dup. Only do this
1370 if the matching constraint is the only alternative. */
1371 if (*end == '\0'
1372 && (j == 0 || (j == 1 && constraint[0] == '%')))
1374 constraint = constraints[match];
1375 *constraint_p = constraint;
1376 c_len = strlen (constraint);
1377 j = 0;
1378 /* ??? At the end of the loop, we will skip the first part of
1379 the matched constraint. This assumes not only that the
1380 other constraint is an output constraint, but also that
1381 the '=' or '+' come first. */
1382 break;
1384 else
1385 j = end - constraint;
1386 /* Anticipate increment at end of loop. */
1387 j--;
1389 /* Fall through. */
1391 case 'p': case 'r':
1392 *allows_reg = true;
1393 break;
1395 case 'g': case 'X':
1396 *allows_reg = true;
1397 *allows_mem = true;
1398 break;
1400 default:
1401 if (! ISALPHA (constraint[j]))
1403 error ("invalid punctuation `%c' in constraint", constraint[j]);
1404 return false;
1406 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
1407 != NO_REGS)
1408 *allows_reg = true;
1409 #ifdef EXTRA_CONSTRAINT_STR
1410 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
1411 *allows_reg = true;
1412 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
1413 *allows_mem = true;
1414 else
1416 /* Otherwise we can't assume anything about the nature of
1417 the constraint except that it isn't purely registers.
1418 Treat it like "g" and hope for the best. */
1419 *allows_reg = true;
1420 *allows_mem = true;
1422 #endif
1423 break;
1426 return true;
1429 /* Check for overlap between registers marked in CLOBBERED_REGS and
1430 anything inappropriate in DECL. Emit error and return TRUE for error,
1431 FALSE for ok. */
1433 static bool
1434 decl_conflicts_with_clobbers_p (decl, clobbered_regs)
1435 tree decl;
1436 const HARD_REG_SET clobbered_regs;
1438 /* Conflicts between asm-declared register variables and the clobber
1439 list are not allowed. */
1440 if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
1441 && DECL_REGISTER (decl)
1442 && REG_P (DECL_RTL (decl))
1443 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
1445 rtx reg = DECL_RTL (decl);
1446 unsigned int regno;
1448 for (regno = REGNO (reg);
1449 regno < (REGNO (reg)
1450 + HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)));
1451 regno++)
1452 if (TEST_HARD_REG_BIT (clobbered_regs, regno))
1454 error ("asm-specifier for variable `%s' conflicts with asm clobber list",
1455 IDENTIFIER_POINTER (DECL_NAME (decl)));
1457 /* Reset registerness to stop multiple errors emitted for a
1458 single variable. */
1459 DECL_REGISTER (decl) = 0;
1460 return true;
1463 return false;
1466 /* Generate RTL for an asm statement with arguments.
1467 STRING is the instruction template.
1468 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1469 Each output or input has an expression in the TREE_VALUE and
1470 and a tree list in TREE_PURPOSE which in turn contains a constraint
1471 name in TREE_VALUE (or NULL_TREE) and a constraint string
1472 in TREE_PURPOSE.
1473 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1474 that is clobbered by this insn.
1476 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1477 Some elements of OUTPUTS may be replaced with trees representing temporary
1478 values. The caller should copy those temporary values to the originally
1479 specified lvalues.
1481 VOL nonzero means the insn is volatile; don't optimize it. */
1483 void
1484 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1485 tree string, outputs, inputs, clobbers;
1486 int vol;
1487 const char *filename;
1488 int line;
1490 rtvec argvec, constraintvec;
1491 rtx body;
1492 int ninputs = list_length (inputs);
1493 int noutputs = list_length (outputs);
1494 int ninout;
1495 int nclobbers;
1496 HARD_REG_SET clobbered_regs;
1497 int clobber_conflict_found = 0;
1498 tree tail;
1499 int i;
1500 /* Vector of RTX's of evaluated output operands. */
1501 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1502 int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
1503 rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1504 enum machine_mode *inout_mode
1505 = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
1506 const char **constraints
1507 = (const char **) alloca ((noutputs + ninputs) * sizeof (const char *));
1508 int old_generating_concat_p = generating_concat_p;
1510 /* An ASM with no outputs needs to be treated as volatile, for now. */
1511 if (noutputs == 0)
1512 vol = 1;
1514 if (! check_operand_nalternatives (outputs, inputs))
1515 return;
1517 if (! check_unique_operand_names (outputs, inputs))
1518 return;
1520 string = resolve_operand_names (string, outputs, inputs, constraints);
1522 #ifdef MD_ASM_CLOBBERS
1523 /* Sometimes we wish to automatically clobber registers across an asm.
1524 Case in point is when the i386 backend moved from cc0 to a hard reg --
1525 maintaining source-level compatibility means automatically clobbering
1526 the flags register. */
1527 MD_ASM_CLOBBERS (clobbers);
1528 #endif
1530 /* Count the number of meaningful clobbered registers, ignoring what
1531 we would ignore later. */
1532 nclobbers = 0;
1533 CLEAR_HARD_REG_SET (clobbered_regs);
1534 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1536 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1538 i = decode_reg_name (regname);
1539 if (i >= 0 || i == -4)
1540 ++nclobbers;
1541 else if (i == -2)
1542 error ("unknown register name `%s' in `asm'", regname);
1544 /* Mark clobbered registers. */
1545 if (i >= 0)
1547 /* Clobbering the PIC register is an error */
1548 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
1550 error ("PIC register `%s' clobbered in `asm'", regname);
1551 return;
1554 SET_HARD_REG_BIT (clobbered_regs, i);
1558 clear_last_expr ();
1560 /* First pass over inputs and outputs checks validity and sets
1561 mark_addressable if needed. */
1563 ninout = 0;
1564 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1566 tree val = TREE_VALUE (tail);
1567 tree type = TREE_TYPE (val);
1568 const char *constraint;
1569 bool is_inout;
1570 bool allows_reg;
1571 bool allows_mem;
1573 /* If there's an erroneous arg, emit no insn. */
1574 if (type == error_mark_node)
1575 return;
1577 /* Try to parse the output constraint. If that fails, there's
1578 no point in going further. */
1579 constraint = constraints[i];
1580 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
1581 &allows_mem, &allows_reg, &is_inout))
1582 return;
1584 if (! allows_reg
1585 && (allows_mem
1586 || is_inout
1587 || (DECL_P (val)
1588 && GET_CODE (DECL_RTL (val)) == REG
1589 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
1590 (*lang_hooks.mark_addressable) (val);
1592 if (is_inout)
1593 ninout++;
1596 ninputs += ninout;
1597 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1599 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1600 return;
1603 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
1605 bool allows_reg, allows_mem;
1606 const char *constraint;
1608 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
1609 would get VOIDmode and that could cause a crash in reload. */
1610 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1611 return;
1613 constraint = constraints[i + noutputs];
1614 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1615 constraints, &allows_mem, &allows_reg))
1616 return;
1618 if (! allows_reg && allows_mem)
1619 (*lang_hooks.mark_addressable) (TREE_VALUE (tail));
1622 /* Second pass evaluates arguments. */
1624 ninout = 0;
1625 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1627 tree val = TREE_VALUE (tail);
1628 tree type = TREE_TYPE (val);
1629 bool is_inout;
1630 bool allows_reg;
1631 bool allows_mem;
1632 rtx op;
1634 if (!parse_output_constraint (&constraints[i], i, ninputs,
1635 noutputs, &allows_mem, &allows_reg,
1636 &is_inout))
1637 abort ();
1639 /* If an output operand is not a decl or indirect ref and our constraint
1640 allows a register, make a temporary to act as an intermediate.
1641 Make the asm insn write into that, then our caller will copy it to
1642 the real output operand. Likewise for promoted variables. */
1644 generating_concat_p = 0;
1646 real_output_rtx[i] = NULL_RTX;
1647 if ((TREE_CODE (val) == INDIRECT_REF
1648 && allows_mem)
1649 || (DECL_P (val)
1650 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
1651 && ! (GET_CODE (DECL_RTL (val)) == REG
1652 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1653 || ! allows_reg
1654 || is_inout)
1656 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
1657 if (GET_CODE (op) == MEM)
1658 op = validize_mem (op);
1660 if (! allows_reg && GET_CODE (op) != MEM)
1661 error ("output number %d not directly addressable", i);
1662 if ((! allows_mem && GET_CODE (op) == MEM)
1663 || GET_CODE (op) == CONCAT)
1665 real_output_rtx[i] = protect_from_queue (op, 1);
1666 op = gen_reg_rtx (GET_MODE (op));
1667 if (is_inout)
1668 emit_move_insn (op, real_output_rtx[i]);
1671 else
1673 op = assign_temp (type, 0, 0, 1);
1674 op = validize_mem (op);
1675 TREE_VALUE (tail) = make_tree (type, op);
1677 output_rtx[i] = op;
1679 generating_concat_p = old_generating_concat_p;
1681 if (is_inout)
1683 inout_mode[ninout] = TYPE_MODE (type);
1684 inout_opnum[ninout++] = i;
1687 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1688 clobber_conflict_found = 1;
1691 /* Make vectors for the expression-rtx, constraint strings,
1692 and named operands. */
1694 argvec = rtvec_alloc (ninputs);
1695 constraintvec = rtvec_alloc (ninputs);
1697 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1698 : GET_MODE (output_rtx[0])),
1699 TREE_STRING_POINTER (string),
1700 empty_string, 0, argvec, constraintvec,
1701 filename, line);
1703 MEM_VOLATILE_P (body) = vol;
1705 /* Eval the inputs and put them into ARGVEC.
1706 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1708 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1710 bool allows_reg, allows_mem;
1711 const char *constraint;
1712 tree val, type;
1713 rtx op;
1715 constraint = constraints[i + noutputs];
1716 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1717 constraints, &allows_mem, &allows_reg))
1718 abort ();
1720 generating_concat_p = 0;
1722 val = TREE_VALUE (tail);
1723 type = TREE_TYPE (val);
1724 op = expand_expr (val, NULL_RTX, VOIDmode, 0);
1726 /* Never pass a CONCAT to an ASM. */
1727 if (GET_CODE (op) == CONCAT)
1728 op = force_reg (GET_MODE (op), op);
1729 else if (GET_CODE (op) == MEM)
1730 op = validize_mem (op);
1732 if (asm_operand_ok (op, constraint) <= 0)
1734 if (allows_reg)
1735 op = force_reg (TYPE_MODE (type), op);
1736 else if (!allows_mem)
1737 warning ("asm operand %d probably doesn't match constraints",
1738 i + noutputs);
1739 else if (CONSTANT_P (op))
1741 op = force_const_mem (TYPE_MODE (type), op);
1742 op = validize_mem (op);
1744 else if (GET_CODE (op) == REG
1745 || GET_CODE (op) == SUBREG
1746 || GET_CODE (op) == ADDRESSOF
1747 || GET_CODE (op) == CONCAT)
1749 tree qual_type = build_qualified_type (type,
1750 (TYPE_QUALS (type)
1751 | TYPE_QUAL_CONST));
1752 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1753 memloc = validize_mem (memloc);
1754 emit_move_insn (memloc, op);
1755 op = memloc;
1758 else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
1760 /* We won't recognize volatile memory as available a
1761 memory_operand at this point. Ignore it. */
1763 else if (queued_subexp_p (op))
1765 else
1766 /* ??? Leave this only until we have experience with what
1767 happens in combine and elsewhere when constraints are
1768 not satisfied. */
1769 warning ("asm operand %d probably doesn't match constraints",
1770 i + noutputs);
1773 generating_concat_p = old_generating_concat_p;
1774 ASM_OPERANDS_INPUT (body, i) = op;
1776 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1777 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1779 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1780 clobber_conflict_found = 1;
1783 /* Protect all the operands from the queue now that they have all been
1784 evaluated. */
1786 generating_concat_p = 0;
1788 for (i = 0; i < ninputs - ninout; i++)
1789 ASM_OPERANDS_INPUT (body, i)
1790 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1792 for (i = 0; i < noutputs; i++)
1793 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1795 /* For in-out operands, copy output rtx to input rtx. */
1796 for (i = 0; i < ninout; i++)
1798 int j = inout_opnum[i];
1799 char buffer[16];
1801 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1802 = output_rtx[j];
1804 sprintf (buffer, "%d", j);
1805 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1806 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_alloc_string (buffer, -1));
1809 generating_concat_p = old_generating_concat_p;
1811 /* Now, for each output, construct an rtx
1812 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1813 ARGVEC CONSTRAINTS OPNAMES))
1814 If there is more than one, put them inside a PARALLEL. */
1816 if (noutputs == 1 && nclobbers == 0)
1818 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1819 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1822 else if (noutputs == 0 && nclobbers == 0)
1824 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1825 emit_insn (body);
1828 else
1830 rtx obody = body;
1831 int num = noutputs;
1833 if (num == 0)
1834 num = 1;
1836 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1838 /* For each output operand, store a SET. */
1839 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1841 XVECEXP (body, 0, i)
1842 = gen_rtx_SET (VOIDmode,
1843 output_rtx[i],
1844 gen_rtx_ASM_OPERANDS
1845 (GET_MODE (output_rtx[i]),
1846 TREE_STRING_POINTER (string),
1847 constraints[i], i, argvec, constraintvec,
1848 filename, line));
1850 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1853 /* If there are no outputs (but there are some clobbers)
1854 store the bare ASM_OPERANDS into the PARALLEL. */
1856 if (i == 0)
1857 XVECEXP (body, 0, i++) = obody;
1859 /* Store (clobber REG) for each clobbered register specified. */
1861 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1863 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1864 int j = decode_reg_name (regname);
1865 rtx clobbered_reg;
1867 if (j < 0)
1869 if (j == -3) /* `cc', which is not a register */
1870 continue;
1872 if (j == -4) /* `memory', don't cache memory across asm */
1874 XVECEXP (body, 0, i++)
1875 = gen_rtx_CLOBBER (VOIDmode,
1876 gen_rtx_MEM
1877 (BLKmode,
1878 gen_rtx_SCRATCH (VOIDmode)));
1879 continue;
1882 /* Ignore unknown register, error already signaled. */
1883 continue;
1886 /* Use QImode since that's guaranteed to clobber just one reg. */
1887 clobbered_reg = gen_rtx_REG (QImode, j);
1889 /* Do sanity check for overlap between clobbers and respectively
1890 input and outputs that hasn't been handled. Such overlap
1891 should have been detected and reported above. */
1892 if (!clobber_conflict_found)
1894 int opno;
1896 /* We test the old body (obody) contents to avoid tripping
1897 over the under-construction body. */
1898 for (opno = 0; opno < noutputs; opno++)
1899 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1900 internal_error ("asm clobber conflict with output operand");
1902 for (opno = 0; opno < ninputs - ninout; opno++)
1903 if (reg_overlap_mentioned_p (clobbered_reg,
1904 ASM_OPERANDS_INPUT (obody, opno)))
1905 internal_error ("asm clobber conflict with input operand");
1908 XVECEXP (body, 0, i++)
1909 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1912 emit_insn (body);
1915 /* For any outputs that needed reloading into registers, spill them
1916 back to where they belong. */
1917 for (i = 0; i < noutputs; ++i)
1918 if (real_output_rtx[i])
1919 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1921 free_temp_slots ();
1924 /* A subroutine of expand_asm_operands. Check that all operands have
1925 the same number of alternatives. Return true if so. */
1927 static bool
1928 check_operand_nalternatives (outputs, inputs)
1929 tree outputs, inputs;
1931 if (outputs || inputs)
1933 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1934 int nalternatives
1935 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1936 tree next = inputs;
1938 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1940 error ("too many alternatives in `asm'");
1941 return false;
1944 tmp = outputs;
1945 while (tmp)
1947 const char *constraint
1948 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1950 if (n_occurrences (',', constraint) != nalternatives)
1952 error ("operand constraints for `asm' differ in number of alternatives");
1953 return false;
1956 if (TREE_CHAIN (tmp))
1957 tmp = TREE_CHAIN (tmp);
1958 else
1959 tmp = next, next = 0;
1963 return true;
1966 /* A subroutine of expand_asm_operands. Check that all operand names
1967 are unique. Return true if so. We rely on the fact that these names
1968 are identifiers, and so have been canonicalized by get_identifier,
1969 so all we need are pointer comparisons. */
1971 static bool
1972 check_unique_operand_names (outputs, inputs)
1973 tree outputs, inputs;
1975 tree i, j;
1977 for (i = outputs; i ; i = TREE_CHAIN (i))
1979 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1980 if (! i_name)
1981 continue;
1983 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1984 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1985 goto failure;
1988 for (i = inputs; i ; i = TREE_CHAIN (i))
1990 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1991 if (! i_name)
1992 continue;
1994 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1995 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1996 goto failure;
1997 for (j = outputs; j ; j = TREE_CHAIN (j))
1998 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1999 goto failure;
2002 return true;
2004 failure:
2005 error ("duplicate asm operand name '%s'",
2006 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
2007 return false;
2010 /* A subroutine of expand_asm_operands. Resolve the names of the operands
2011 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
2012 STRING and in the constraints to those numbers. */
2014 static tree
2015 resolve_operand_names (string, outputs, inputs, pconstraints)
2016 tree string;
2017 tree outputs, inputs;
2018 const char **pconstraints;
2020 char *buffer = xstrdup (TREE_STRING_POINTER (string));
2021 char *p;
2022 tree t;
2024 /* Assume that we will not need extra space to perform the substitution.
2025 This because we get to remove '[' and ']', which means we cannot have
2026 a problem until we have more than 999 operands. */
2028 p = buffer;
2029 while ((p = strchr (p, '%')) != NULL)
2031 if (p[1] == '[')
2032 p += 1;
2033 else if (ISALPHA (p[1]) && p[2] == '[')
2034 p += 2;
2035 else
2037 p += 1;
2038 continue;
2041 p = resolve_operand_name_1 (p, outputs, inputs);
2044 string = build_string (strlen (buffer), buffer);
2045 free (buffer);
2047 /* Collect output constraints here because it's convenient.
2048 There should be no named operands here; this is verified
2049 in expand_asm_operand. */
2050 for (t = outputs; t ; t = TREE_CHAIN (t), pconstraints++)
2051 *pconstraints = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2053 /* Substitute [<name>] in input constraint strings. */
2054 for (t = inputs; t ; t = TREE_CHAIN (t), pconstraints++)
2056 const char *c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2057 if (strchr (c, '[') == NULL)
2058 *pconstraints = c;
2059 else
2061 p = buffer = xstrdup (c);
2062 while ((p = strchr (p, '[')) != NULL)
2063 p = resolve_operand_name_1 (p, outputs, inputs);
2065 *pconstraints = ggc_alloc_string (buffer, -1);
2066 free (buffer);
2070 return string;
2073 /* A subroutine of resolve_operand_names. P points to the '[' for a
2074 potential named operand of the form [<name>]. In place, replace
2075 the name and brackets with a number. Return a pointer to the
2076 balance of the string after substitution. */
2078 static char *
2079 resolve_operand_name_1 (p, outputs, inputs)
2080 char *p;
2081 tree outputs, inputs;
2083 char *q;
2084 int op;
2085 tree t;
2086 size_t len;
2088 /* Collect the operand name. */
2089 q = strchr (p, ']');
2090 if (!q)
2092 error ("missing close brace for named operand");
2093 return strchr (p, '\0');
2095 len = q - p - 1;
2097 /* Resolve the name to a number. */
2098 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2100 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2101 if (name)
2103 const char *c = TREE_STRING_POINTER (name);
2104 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2105 goto found;
2108 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2110 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2111 if (name)
2113 const char *c = TREE_STRING_POINTER (name);
2114 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2115 goto found;
2119 *q = '\0';
2120 error ("undefined named operand '%s'", p + 1);
2121 op = 0;
2122 found:
2124 /* Replace the name with the number. Unfortunately, not all libraries
2125 get the return value of sprintf correct, so search for the end of the
2126 generated string by hand. */
2127 sprintf (p, "%d", op);
2128 p = strchr (p, '\0');
2130 /* Verify the no extra buffer space assumption. */
2131 if (p > q)
2132 abort ();
2134 /* Shift the rest of the buffer down to fill the gap. */
2135 memmove (p, q + 1, strlen (q + 1) + 1);
2137 return p;
2140 /* Generate RTL to evaluate the expression EXP
2141 and remember it in case this is the VALUE in a ({... VALUE; }) constr.
2142 Provided just for backward-compatibility. expand_expr_stmt_value()
2143 should be used for new code. */
2145 void
2146 expand_expr_stmt (exp)
2147 tree exp;
2149 expand_expr_stmt_value (exp, -1, 1);
2152 /* Generate RTL to evaluate the expression EXP. WANT_VALUE tells
2153 whether to (1) save the value of the expression, (0) discard it or
2154 (-1) use expr_stmts_for_value to tell. The use of -1 is
2155 deprecated, and retained only for backward compatibility. */
2157 void
2158 expand_expr_stmt_value (exp, want_value, maybe_last)
2159 tree exp;
2160 int want_value, maybe_last;
2162 rtx value;
2163 tree type;
2165 if (want_value == -1)
2166 want_value = expr_stmts_for_value != 0;
2168 /* If -Wextra, warn about statements with no side effects,
2169 except for an explicit cast to void (e.g. for assert()), and
2170 except for last statement in ({...}) where they may be useful. */
2171 if (! want_value
2172 && (expr_stmts_for_value == 0 || ! maybe_last)
2173 && exp != error_mark_node)
2175 if (! TREE_SIDE_EFFECTS (exp))
2177 if ((extra_warnings || warn_unused_value)
2178 && !(TREE_CODE (exp) == CONVERT_EXPR
2179 && VOID_TYPE_P (TREE_TYPE (exp))))
2180 warning_with_file_and_line (emit_filename, emit_lineno,
2181 "statement with no effect");
2183 else if (warn_unused_value)
2184 warn_if_unused_value (exp);
2187 /* If EXP is of function type and we are expanding statements for
2188 value, convert it to pointer-to-function. */
2189 if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2190 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2192 /* The call to `expand_expr' could cause last_expr_type and
2193 last_expr_value to get reset. Therefore, we set last_expr_value
2194 and last_expr_type *after* calling expand_expr. */
2195 value = expand_expr (exp, want_value ? NULL_RTX : const0_rtx,
2196 VOIDmode, 0);
2197 type = TREE_TYPE (exp);
2199 /* If all we do is reference a volatile value in memory,
2200 copy it to a register to be sure it is actually touched. */
2201 if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
2203 if (TYPE_MODE (type) == VOIDmode)
2205 else if (TYPE_MODE (type) != BLKmode)
2206 value = copy_to_reg (value);
2207 else
2209 rtx lab = gen_label_rtx ();
2211 /* Compare the value with itself to reference it. */
2212 emit_cmp_and_jump_insns (value, value, EQ,
2213 expand_expr (TYPE_SIZE (type),
2214 NULL_RTX, VOIDmode, 0),
2215 BLKmode, 0, lab);
2216 emit_label (lab);
2220 /* If this expression is part of a ({...}) and is in memory, we may have
2221 to preserve temporaries. */
2222 preserve_temp_slots (value);
2224 /* Free any temporaries used to evaluate this expression. Any temporary
2225 used as a result of this expression will already have been preserved
2226 above. */
2227 free_temp_slots ();
2229 if (want_value)
2231 last_expr_value = value;
2232 last_expr_type = type;
2235 emit_queue ();
2238 /* Warn if EXP contains any computations whose results are not used.
2239 Return 1 if a warning is printed; 0 otherwise. */
2242 warn_if_unused_value (exp)
2243 tree exp;
2245 if (TREE_USED (exp))
2246 return 0;
2248 /* Don't warn about void constructs. This includes casting to void,
2249 void function calls, and statement expressions with a final cast
2250 to void. */
2251 if (VOID_TYPE_P (TREE_TYPE (exp)))
2252 return 0;
2254 switch (TREE_CODE (exp))
2256 case PREINCREMENT_EXPR:
2257 case POSTINCREMENT_EXPR:
2258 case PREDECREMENT_EXPR:
2259 case POSTDECREMENT_EXPR:
2260 case MODIFY_EXPR:
2261 case INIT_EXPR:
2262 case TARGET_EXPR:
2263 case CALL_EXPR:
2264 case METHOD_CALL_EXPR:
2265 case RTL_EXPR:
2266 case TRY_CATCH_EXPR:
2267 case WITH_CLEANUP_EXPR:
2268 case EXIT_EXPR:
2269 return 0;
2271 case BIND_EXPR:
2272 /* For a binding, warn if no side effect within it. */
2273 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2275 case SAVE_EXPR:
2276 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2278 case TRUTH_ORIF_EXPR:
2279 case TRUTH_ANDIF_EXPR:
2280 /* In && or ||, warn if 2nd operand has no side effect. */
2281 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2283 case COMPOUND_EXPR:
2284 if (TREE_NO_UNUSED_WARNING (exp))
2285 return 0;
2286 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2287 return 1;
2288 /* Let people do `(foo (), 0)' without a warning. */
2289 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2290 return 0;
2291 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2293 case NOP_EXPR:
2294 case CONVERT_EXPR:
2295 case NON_LVALUE_EXPR:
2296 /* Don't warn about conversions not explicit in the user's program. */
2297 if (TREE_NO_UNUSED_WARNING (exp))
2298 return 0;
2299 /* Assignment to a cast usually results in a cast of a modify.
2300 Don't complain about that. There can be an arbitrary number of
2301 casts before the modify, so we must loop until we find the first
2302 non-cast expression and then test to see if that is a modify. */
2304 tree tem = TREE_OPERAND (exp, 0);
2306 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2307 tem = TREE_OPERAND (tem, 0);
2309 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2310 || TREE_CODE (tem) == CALL_EXPR)
2311 return 0;
2313 goto maybe_warn;
2315 case INDIRECT_REF:
2316 /* Don't warn about automatic dereferencing of references, since
2317 the user cannot control it. */
2318 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2319 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2320 /* Fall through. */
2322 default:
2323 /* Referencing a volatile value is a side effect, so don't warn. */
2324 if ((DECL_P (exp)
2325 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2326 && TREE_THIS_VOLATILE (exp))
2327 return 0;
2329 /* If this is an expression which has no operands, there is no value
2330 to be unused. There are no such language-independent codes,
2331 but front ends may define such. */
2332 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2333 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2334 return 0;
2336 maybe_warn:
2337 /* If this is an expression with side effects, don't warn. */
2338 if (TREE_SIDE_EFFECTS (exp))
2339 return 0;
2341 warning_with_file_and_line (emit_filename, emit_lineno,
2342 "value computed is not used");
2343 return 1;
2347 /* Clear out the memory of the last expression evaluated. */
2349 void
2350 clear_last_expr ()
2352 last_expr_type = NULL_TREE;
2353 last_expr_value = NULL_RTX;
2356 /* Begin a statement-expression, i.e., a series of statements which
2357 may return a value. Return the RTL_EXPR for this statement expr.
2358 The caller must save that value and pass it to
2359 expand_end_stmt_expr. If HAS_SCOPE is nonzero, temporaries created
2360 in the statement-expression are deallocated at the end of the
2361 expression. */
2363 tree
2364 expand_start_stmt_expr (has_scope)
2365 int has_scope;
2367 tree t;
2369 /* Make the RTL_EXPR node temporary, not momentary,
2370 so that rtl_expr_chain doesn't become garbage. */
2371 t = make_node (RTL_EXPR);
2372 do_pending_stack_adjust ();
2373 if (has_scope)
2374 start_sequence_for_rtl_expr (t);
2375 else
2376 start_sequence ();
2377 NO_DEFER_POP;
2378 expr_stmts_for_value++;
2379 return t;
2382 /* Restore the previous state at the end of a statement that returns a value.
2383 Returns a tree node representing the statement's value and the
2384 insns to compute the value.
2386 The nodes of that expression have been freed by now, so we cannot use them.
2387 But we don't want to do that anyway; the expression has already been
2388 evaluated and now we just want to use the value. So generate a RTL_EXPR
2389 with the proper type and RTL value.
2391 If the last substatement was not an expression,
2392 return something with type `void'. */
2394 tree
2395 expand_end_stmt_expr (t)
2396 tree t;
2398 OK_DEFER_POP;
2400 if (! last_expr_value || ! last_expr_type)
2402 last_expr_value = const0_rtx;
2403 last_expr_type = void_type_node;
2405 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2406 /* Remove any possible QUEUED. */
2407 last_expr_value = protect_from_queue (last_expr_value, 0);
2409 emit_queue ();
2411 TREE_TYPE (t) = last_expr_type;
2412 RTL_EXPR_RTL (t) = last_expr_value;
2413 RTL_EXPR_SEQUENCE (t) = get_insns ();
2415 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2417 end_sequence ();
2419 /* Don't consider deleting this expr or containing exprs at tree level. */
2420 TREE_SIDE_EFFECTS (t) = 1;
2421 /* Propagate volatility of the actual RTL expr. */
2422 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2424 clear_last_expr ();
2425 expr_stmts_for_value--;
2427 return t;
2430 /* Generate RTL for the start of an if-then. COND is the expression
2431 whose truth should be tested.
2433 If EXITFLAG is nonzero, this conditional is visible to
2434 `exit_something'. */
2436 void
2437 expand_start_cond (cond, exitflag)
2438 tree cond;
2439 int exitflag;
2441 struct nesting *thiscond = ALLOC_NESTING ();
2443 /* Make an entry on cond_stack for the cond we are entering. */
2445 thiscond->desc = COND_NESTING;
2446 thiscond->next = cond_stack;
2447 thiscond->all = nesting_stack;
2448 thiscond->depth = ++nesting_depth;
2449 thiscond->data.cond.next_label = gen_label_rtx ();
2450 /* Before we encounter an `else', we don't need a separate exit label
2451 unless there are supposed to be exit statements
2452 to exit this conditional. */
2453 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2454 thiscond->data.cond.endif_label = thiscond->exit_label;
2455 cond_stack = thiscond;
2456 nesting_stack = thiscond;
2458 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2461 /* Generate RTL between then-clause and the elseif-clause
2462 of an if-then-elseif-.... */
2464 void
2465 expand_start_elseif (cond)
2466 tree cond;
2468 if (cond_stack->data.cond.endif_label == 0)
2469 cond_stack->data.cond.endif_label = gen_label_rtx ();
2470 emit_jump (cond_stack->data.cond.endif_label);
2471 emit_label (cond_stack->data.cond.next_label);
2472 cond_stack->data.cond.next_label = gen_label_rtx ();
2473 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2476 /* Generate RTL between the then-clause and the else-clause
2477 of an if-then-else. */
2479 void
2480 expand_start_else ()
2482 if (cond_stack->data.cond.endif_label == 0)
2483 cond_stack->data.cond.endif_label = gen_label_rtx ();
2485 emit_jump (cond_stack->data.cond.endif_label);
2486 emit_label (cond_stack->data.cond.next_label);
2487 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2490 /* After calling expand_start_else, turn this "else" into an "else if"
2491 by providing another condition. */
2493 void
2494 expand_elseif (cond)
2495 tree cond;
2497 cond_stack->data.cond.next_label = gen_label_rtx ();
2498 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2501 /* Generate RTL for the end of an if-then.
2502 Pop the record for it off of cond_stack. */
2504 void
2505 expand_end_cond ()
2507 struct nesting *thiscond = cond_stack;
2509 do_pending_stack_adjust ();
2510 if (thiscond->data.cond.next_label)
2511 emit_label (thiscond->data.cond.next_label);
2512 if (thiscond->data.cond.endif_label)
2513 emit_label (thiscond->data.cond.endif_label);
2515 POPSTACK (cond_stack);
2516 clear_last_expr ();
2519 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2520 loop should be exited by `exit_something'. This is a loop for which
2521 `expand_continue' will jump to the top of the loop.
2523 Make an entry on loop_stack to record the labels associated with
2524 this loop. */
2526 struct nesting *
2527 expand_start_loop (exit_flag)
2528 int exit_flag;
2530 struct nesting *thisloop = ALLOC_NESTING ();
2532 /* Make an entry on loop_stack for the loop we are entering. */
2534 thisloop->desc = LOOP_NESTING;
2535 thisloop->next = loop_stack;
2536 thisloop->all = nesting_stack;
2537 thisloop->depth = ++nesting_depth;
2538 thisloop->data.loop.start_label = gen_label_rtx ();
2539 thisloop->data.loop.end_label = gen_label_rtx ();
2540 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2541 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2542 loop_stack = thisloop;
2543 nesting_stack = thisloop;
2545 do_pending_stack_adjust ();
2546 emit_queue ();
2547 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2548 emit_label (thisloop->data.loop.start_label);
2550 return thisloop;
2553 /* Like expand_start_loop but for a loop where the continuation point
2554 (for expand_continue_loop) will be specified explicitly. */
2556 struct nesting *
2557 expand_start_loop_continue_elsewhere (exit_flag)
2558 int exit_flag;
2560 struct nesting *thisloop = expand_start_loop (exit_flag);
2561 loop_stack->data.loop.continue_label = gen_label_rtx ();
2562 return thisloop;
2565 /* Begin a null, aka do { } while (0) "loop". But since the contents
2566 of said loop can still contain a break, we must frob the loop nest. */
2568 struct nesting *
2569 expand_start_null_loop ()
2571 struct nesting *thisloop = ALLOC_NESTING ();
2573 /* Make an entry on loop_stack for the loop we are entering. */
2575 thisloop->desc = LOOP_NESTING;
2576 thisloop->next = loop_stack;
2577 thisloop->all = nesting_stack;
2578 thisloop->depth = ++nesting_depth;
2579 thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
2580 thisloop->data.loop.end_label = gen_label_rtx ();
2581 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2582 thisloop->exit_label = thisloop->data.loop.end_label;
2583 loop_stack = thisloop;
2584 nesting_stack = thisloop;
2586 return thisloop;
2589 /* Specify the continuation point for a loop started with
2590 expand_start_loop_continue_elsewhere.
2591 Use this at the point in the code to which a continue statement
2592 should jump. */
2594 void
2595 expand_loop_continue_here ()
2597 do_pending_stack_adjust ();
2598 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2599 emit_label (loop_stack->data.loop.continue_label);
2602 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2603 Pop the block off of loop_stack. */
2605 void
2606 expand_end_loop ()
2608 rtx start_label = loop_stack->data.loop.start_label;
2609 rtx etc_note;
2610 int eh_regions, debug_blocks;
2611 bool empty_test;
2613 /* Mark the continue-point at the top of the loop if none elsewhere. */
2614 if (start_label == loop_stack->data.loop.continue_label)
2615 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2617 do_pending_stack_adjust ();
2619 /* If the loop starts with a loop exit, roll that to the end where
2620 it will optimize together with the jump back.
2622 If the loop presently looks like this (in pseudo-C):
2624 LOOP_BEG
2625 start_label:
2626 if (test) goto end_label;
2627 LOOP_END_TOP_COND
2628 body;
2629 goto start_label;
2630 end_label:
2632 transform it to look like:
2634 LOOP_BEG
2635 goto start_label;
2636 top_label:
2637 body;
2638 start_label:
2639 if (test) goto end_label;
2640 goto top_label;
2641 end_label:
2643 We rely on the presence of NOTE_INSN_LOOP_END_TOP_COND to mark
2644 the end of the entry conditional. Without this, our lexical scan
2645 can't tell the difference between an entry conditional and a
2646 body conditional that exits the loop. Mistaking the two means
2647 that we can misplace the NOTE_INSN_LOOP_CONT note, which can
2648 screw up loop unrolling.
2650 Things will be oh so much better when loop optimization is done
2651 off of a proper control flow graph... */
2653 /* Scan insns from the top of the loop looking for the END_TOP_COND note. */
2655 empty_test = true;
2656 eh_regions = debug_blocks = 0;
2657 for (etc_note = start_label; etc_note ; etc_note = NEXT_INSN (etc_note))
2658 if (GET_CODE (etc_note) == NOTE)
2660 if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_END_TOP_COND)
2661 break;
2663 /* We must not walk into a nested loop. */
2664 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_BEG)
2666 etc_note = NULL_RTX;
2667 break;
2670 /* At the same time, scan for EH region notes, as we don't want
2671 to scrog region nesting. This shouldn't happen, but... */
2672 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_BEG)
2673 eh_regions++;
2674 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_END)
2676 if (--eh_regions < 0)
2677 /* We've come to the end of an EH region, but never saw the
2678 beginning of that region. That means that an EH region
2679 begins before the top of the loop, and ends in the middle
2680 of it. The existence of such a situation violates a basic
2681 assumption in this code, since that would imply that even
2682 when EH_REGIONS is zero, we might move code out of an
2683 exception region. */
2684 abort ();
2687 /* Likewise for debug scopes. In this case we'll either (1) move
2688 all of the notes if they are properly nested or (2) leave the
2689 notes alone and only rotate the loop at high optimization
2690 levels when we expect to scrog debug info. */
2691 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_BEG)
2692 debug_blocks++;
2693 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_END)
2694 debug_blocks--;
2696 else if (INSN_P (etc_note))
2697 empty_test = false;
2699 if (etc_note
2700 && optimize
2701 && ! empty_test
2702 && eh_regions == 0
2703 && (debug_blocks == 0 || optimize >= 2)
2704 && NEXT_INSN (etc_note) != NULL_RTX
2705 && ! any_condjump_p (get_last_insn ()))
2707 /* We found one. Move everything from START to ETC to the end
2708 of the loop, and add a jump from the top of the loop. */
2709 rtx top_label = gen_label_rtx ();
2710 rtx start_move = start_label;
2712 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2713 then we want to move this note also. */
2714 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2715 && NOTE_LINE_NUMBER (PREV_INSN (start_move)) == NOTE_INSN_LOOP_CONT)
2716 start_move = PREV_INSN (start_move);
2718 emit_label_before (top_label, start_move);
2720 /* Actually move the insns. If the debug scopes are nested, we
2721 can move everything at once. Otherwise we have to move them
2722 one by one and squeeze out the block notes. */
2723 if (debug_blocks == 0)
2724 reorder_insns (start_move, etc_note, get_last_insn ());
2725 else
2727 rtx insn, next_insn;
2728 for (insn = start_move; insn; insn = next_insn)
2730 /* Figure out which insn comes after this one. We have
2731 to do this before we move INSN. */
2732 next_insn = (insn == etc_note ? NULL : NEXT_INSN (insn));
2734 if (GET_CODE (insn) == NOTE
2735 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2736 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2737 continue;
2739 reorder_insns (insn, insn, get_last_insn ());
2743 /* Add the jump from the top of the loop. */
2744 emit_jump_insn_before (gen_jump (start_label), top_label);
2745 emit_barrier_before (top_label);
2746 start_label = top_label;
2749 emit_jump (start_label);
2750 emit_note (NULL, NOTE_INSN_LOOP_END);
2751 emit_label (loop_stack->data.loop.end_label);
2753 POPSTACK (loop_stack);
2755 clear_last_expr ();
2758 /* Finish a null loop, aka do { } while (0). */
2760 void
2761 expand_end_null_loop ()
2763 do_pending_stack_adjust ();
2764 emit_label (loop_stack->data.loop.end_label);
2766 POPSTACK (loop_stack);
2768 clear_last_expr ();
2771 /* Generate a jump to the current loop's continue-point.
2772 This is usually the top of the loop, but may be specified
2773 explicitly elsewhere. If not currently inside a loop,
2774 return 0 and do nothing; caller will print an error message. */
2777 expand_continue_loop (whichloop)
2778 struct nesting *whichloop;
2780 /* Emit information for branch prediction. */
2781 rtx note;
2783 if (flag_guess_branch_prob)
2785 note = emit_note (NULL, NOTE_INSN_PREDICTION);
2786 NOTE_PREDICTION (note) = NOTE_PREDICT (PRED_CONTINUE, IS_TAKEN);
2788 clear_last_expr ();
2789 if (whichloop == 0)
2790 whichloop = loop_stack;
2791 if (whichloop == 0)
2792 return 0;
2793 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2794 NULL_RTX);
2795 return 1;
2798 /* Generate a jump to exit the current loop. If not currently inside a loop,
2799 return 0 and do nothing; caller will print an error message. */
2802 expand_exit_loop (whichloop)
2803 struct nesting *whichloop;
2805 clear_last_expr ();
2806 if (whichloop == 0)
2807 whichloop = loop_stack;
2808 if (whichloop == 0)
2809 return 0;
2810 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2811 return 1;
2814 /* Generate a conditional jump to exit the current loop if COND
2815 evaluates to zero. If not currently inside a loop,
2816 return 0 and do nothing; caller will print an error message. */
2819 expand_exit_loop_if_false (whichloop, cond)
2820 struct nesting *whichloop;
2821 tree cond;
2823 rtx label;
2824 clear_last_expr ();
2826 if (whichloop == 0)
2827 whichloop = loop_stack;
2828 if (whichloop == 0)
2829 return 0;
2831 if (integer_nonzerop (cond))
2832 return 1;
2833 if (integer_zerop (cond))
2834 return expand_exit_loop (whichloop);
2836 /* Check if we definitely won't need a fixup. */
2837 if (whichloop == nesting_stack)
2839 jumpifnot (cond, whichloop->data.loop.end_label);
2840 return 1;
2843 /* In order to handle fixups, we actually create a conditional jump
2844 around an unconditional branch to exit the loop. If fixups are
2845 necessary, they go before the unconditional branch. */
2847 label = gen_label_rtx ();
2848 jumpif (cond, label);
2849 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2850 NULL_RTX);
2851 emit_label (label);
2853 return 1;
2856 /* Like expand_exit_loop_if_false except also emit a note marking
2857 the end of the conditional. Should only be used immediately
2858 after expand_loop_start. */
2861 expand_exit_loop_top_cond (whichloop, cond)
2862 struct nesting *whichloop;
2863 tree cond;
2865 if (! expand_exit_loop_if_false (whichloop, cond))
2866 return 0;
2868 emit_note (NULL, NOTE_INSN_LOOP_END_TOP_COND);
2869 return 1;
2872 /* Return nonzero if we should preserve sub-expressions as separate
2873 pseudos. We never do so if we aren't optimizing. We always do so
2874 if -fexpensive-optimizations.
2876 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2877 the loop may still be a small one. */
2880 preserve_subexpressions_p ()
2882 rtx insn;
2884 if (flag_expensive_optimizations)
2885 return 1;
2887 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2888 return 0;
2890 insn = get_last_insn_anywhere ();
2892 return (insn
2893 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2894 < n_non_fixed_regs * 3));
2898 /* Generate a jump to exit the current loop, conditional, binding contour
2899 or case statement. Not all such constructs are visible to this function,
2900 only those started with EXIT_FLAG nonzero. Individual languages use
2901 the EXIT_FLAG parameter to control which kinds of constructs you can
2902 exit this way.
2904 If not currently inside anything that can be exited,
2905 return 0 and do nothing; caller will print an error message. */
2908 expand_exit_something ()
2910 struct nesting *n;
2911 clear_last_expr ();
2912 for (n = nesting_stack; n; n = n->all)
2913 if (n->exit_label != 0)
2915 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2916 return 1;
2919 return 0;
2922 /* Generate RTL to return from the current function, with no value.
2923 (That is, we do not do anything about returning any value.) */
2925 void
2926 expand_null_return ()
2928 rtx last_insn;
2930 last_insn = get_last_insn ();
2932 /* If this function was declared to return a value, but we
2933 didn't, clobber the return registers so that they are not
2934 propagated live to the rest of the function. */
2935 clobber_return_register ();
2937 expand_null_return_1 (last_insn);
2940 /* Try to guess whether the value of return means error code. */
2941 static enum br_predictor
2942 return_prediction (val)
2943 rtx val;
2945 /* Different heuristics for pointers and scalars. */
2946 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2948 /* NULL is usually not returned. */
2949 if (val == const0_rtx)
2950 return PRED_NULL_RETURN;
2952 else
2954 /* Negative return values are often used to indicate
2955 errors. */
2956 if (GET_CODE (val) == CONST_INT
2957 && INTVAL (val) < 0)
2958 return PRED_NEGATIVE_RETURN;
2959 /* Constant return values are also usually erors,
2960 zero/one often mean booleans so exclude them from the
2961 heuristics. */
2962 if (CONSTANT_P (val)
2963 && (val != const0_rtx && val != const1_rtx))
2964 return PRED_CONST_RETURN;
2966 return PRED_NO_PREDICTION;
2969 /* Generate RTL to return from the current function, with value VAL. */
2971 static void
2972 expand_value_return (val)
2973 rtx val;
2975 rtx last_insn;
2976 rtx return_reg;
2977 enum br_predictor pred;
2979 if (flag_guess_branch_prob
2980 && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
2982 /* Emit information for branch prediction. */
2983 rtx note;
2985 note = emit_note (NULL, NOTE_INSN_PREDICTION);
2987 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
2991 last_insn = get_last_insn ();
2992 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2994 /* Copy the value to the return location
2995 unless it's already there. */
2997 if (return_reg != val)
2999 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
3000 #ifdef PROMOTE_FUNCTION_RETURN
3001 int unsignedp = TREE_UNSIGNED (type);
3002 enum machine_mode old_mode
3003 = DECL_MODE (DECL_RESULT (current_function_decl));
3004 enum machine_mode mode
3005 = promote_mode (type, old_mode, &unsignedp, 1);
3007 if (mode != old_mode)
3008 val = convert_modes (mode, old_mode, val, unsignedp);
3009 #endif
3010 if (GET_CODE (return_reg) == PARALLEL)
3011 emit_group_load (return_reg, val, int_size_in_bytes (type));
3012 else
3013 emit_move_insn (return_reg, val);
3016 expand_null_return_1 (last_insn);
3019 /* Output a return with no value. If LAST_INSN is nonzero,
3020 pretend that the return takes place after LAST_INSN. */
3022 static void
3023 expand_null_return_1 (last_insn)
3024 rtx last_insn;
3026 rtx end_label = cleanup_label ? cleanup_label : return_label;
3028 clear_pending_stack_adjust ();
3029 do_pending_stack_adjust ();
3030 clear_last_expr ();
3032 if (end_label == 0)
3033 end_label = return_label = gen_label_rtx ();
3034 expand_goto_internal (NULL_TREE, end_label, last_insn);
3037 /* Generate RTL to evaluate the expression RETVAL and return it
3038 from the current function. */
3040 void
3041 expand_return (retval)
3042 tree retval;
3044 /* If there are any cleanups to be performed, then they will
3045 be inserted following LAST_INSN. It is desirable
3046 that the last_insn, for such purposes, should be the
3047 last insn before computing the return value. Otherwise, cleanups
3048 which call functions can clobber the return value. */
3049 /* ??? rms: I think that is erroneous, because in C++ it would
3050 run destructors on variables that might be used in the subsequent
3051 computation of the return value. */
3052 rtx last_insn = 0;
3053 rtx result_rtl;
3054 rtx val = 0;
3055 tree retval_rhs;
3057 /* If function wants no value, give it none. */
3058 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3060 expand_expr (retval, NULL_RTX, VOIDmode, 0);
3061 emit_queue ();
3062 expand_null_return ();
3063 return;
3066 if (retval == error_mark_node)
3068 /* Treat this like a return of no value from a function that
3069 returns a value. */
3070 expand_null_return ();
3071 return;
3073 else if (TREE_CODE (retval) == RESULT_DECL)
3074 retval_rhs = retval;
3075 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
3076 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3077 retval_rhs = TREE_OPERAND (retval, 1);
3078 else if (VOID_TYPE_P (TREE_TYPE (retval)))
3079 /* Recognize tail-recursive call to void function. */
3080 retval_rhs = retval;
3081 else
3082 retval_rhs = NULL_TREE;
3084 last_insn = get_last_insn ();
3086 /* Distribute return down conditional expr if either of the sides
3087 may involve tail recursion (see test below). This enhances the number
3088 of tail recursions we see. Don't do this always since it can produce
3089 sub-optimal code in some cases and we distribute assignments into
3090 conditional expressions when it would help. */
3092 if (optimize && retval_rhs != 0
3093 && frame_offset == 0
3094 && TREE_CODE (retval_rhs) == COND_EXPR
3095 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
3096 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
3098 rtx label = gen_label_rtx ();
3099 tree expr;
3101 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
3102 start_cleanup_deferral ();
3103 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3104 DECL_RESULT (current_function_decl),
3105 TREE_OPERAND (retval_rhs, 1));
3106 TREE_SIDE_EFFECTS (expr) = 1;
3107 expand_return (expr);
3108 emit_label (label);
3110 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3111 DECL_RESULT (current_function_decl),
3112 TREE_OPERAND (retval_rhs, 2));
3113 TREE_SIDE_EFFECTS (expr) = 1;
3114 expand_return (expr);
3115 end_cleanup_deferral ();
3116 return;
3119 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3121 /* If the result is an aggregate that is being returned in one (or more)
3122 registers, load the registers here. The compiler currently can't handle
3123 copying a BLKmode value into registers. We could put this code in a
3124 more general area (for use by everyone instead of just function
3125 call/return), but until this feature is generally usable it is kept here
3126 (and in expand_call). The value must go into a pseudo in case there
3127 are cleanups that will clobber the real return register. */
3129 if (retval_rhs != 0
3130 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3131 && GET_CODE (result_rtl) == REG)
3133 int i;
3134 unsigned HOST_WIDE_INT bitpos, xbitpos;
3135 unsigned HOST_WIDE_INT big_endian_correction = 0;
3136 unsigned HOST_WIDE_INT bytes
3137 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3138 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3139 unsigned int bitsize
3140 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3141 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
3142 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3143 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3144 enum machine_mode tmpmode, result_reg_mode;
3146 if (bytes == 0)
3148 expand_null_return ();
3149 return;
3152 /* Structures whose size is not a multiple of a word are aligned
3153 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
3154 machine, this means we must skip the empty high order bytes when
3155 calculating the bit offset. */
3156 if (BYTES_BIG_ENDIAN
3157 && bytes % UNITS_PER_WORD)
3158 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3159 * BITS_PER_UNIT));
3161 /* Copy the structure BITSIZE bits at a time. */
3162 for (bitpos = 0, xbitpos = big_endian_correction;
3163 bitpos < bytes * BITS_PER_UNIT;
3164 bitpos += bitsize, xbitpos += bitsize)
3166 /* We need a new destination pseudo each time xbitpos is
3167 on a word boundary and when xbitpos == big_endian_correction
3168 (the first time through). */
3169 if (xbitpos % BITS_PER_WORD == 0
3170 || xbitpos == big_endian_correction)
3172 /* Generate an appropriate register. */
3173 dst = gen_reg_rtx (word_mode);
3174 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3176 /* Clear the destination before we move anything into it. */
3177 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
3180 /* We need a new source operand each time bitpos is on a word
3181 boundary. */
3182 if (bitpos % BITS_PER_WORD == 0)
3183 src = operand_subword_force (result_val,
3184 bitpos / BITS_PER_WORD,
3185 BLKmode);
3187 /* Use bitpos for the source extraction (left justified) and
3188 xbitpos for the destination store (right justified). */
3189 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3190 extract_bit_field (src, bitsize,
3191 bitpos % BITS_PER_WORD, 1,
3192 NULL_RTX, word_mode, word_mode,
3193 BITS_PER_WORD),
3194 BITS_PER_WORD);
3197 /* Find the smallest integer mode large enough to hold the
3198 entire structure and use that mode instead of BLKmode
3199 on the USE insn for the return register. */
3200 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3201 tmpmode != VOIDmode;
3202 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3203 /* Have we found a large enough mode? */
3204 if (GET_MODE_SIZE (tmpmode) >= bytes)
3205 break;
3207 /* No suitable mode found. */
3208 if (tmpmode == VOIDmode)
3209 abort ();
3211 PUT_MODE (result_rtl, tmpmode);
3213 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3214 result_reg_mode = word_mode;
3215 else
3216 result_reg_mode = tmpmode;
3217 result_reg = gen_reg_rtx (result_reg_mode);
3219 emit_queue ();
3220 for (i = 0; i < n_regs; i++)
3221 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3222 result_pseudos[i]);
3224 if (tmpmode != result_reg_mode)
3225 result_reg = gen_lowpart (tmpmode, result_reg);
3227 expand_value_return (result_reg);
3229 else if (retval_rhs != 0
3230 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3231 && (GET_CODE (result_rtl) == REG
3232 || (GET_CODE (result_rtl) == PARALLEL)))
3234 /* Calculate the return value into a temporary (usually a pseudo
3235 reg). */
3236 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3237 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3239 val = assign_temp (nt, 0, 0, 1);
3240 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3241 val = force_not_mem (val);
3242 emit_queue ();
3243 /* Return the calculated value, doing cleanups first. */
3244 expand_value_return (val);
3246 else
3248 /* No cleanups or no hard reg used;
3249 calculate value into hard return reg. */
3250 expand_expr (retval, const0_rtx, VOIDmode, 0);
3251 emit_queue ();
3252 expand_value_return (result_rtl);
3256 /* Attempt to optimize a potential tail recursion call into a goto.
3257 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3258 where to place the jump to the tail recursion label.
3260 Return TRUE if the call was optimized into a goto. */
3263 optimize_tail_recursion (arguments, last_insn)
3264 tree arguments;
3265 rtx last_insn;
3267 /* Finish checking validity, and if valid emit code to set the
3268 argument variables for the new call. */
3269 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3271 if (tail_recursion_label == 0)
3273 tail_recursion_label = gen_label_rtx ();
3274 emit_label_after (tail_recursion_label,
3275 tail_recursion_reentry);
3277 emit_queue ();
3278 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3279 emit_barrier ();
3280 return 1;
3282 return 0;
3285 /* Emit code to alter this function's formal parms for a tail-recursive call.
3286 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3287 FORMALS is the chain of decls of formals.
3288 Return 1 if this can be done;
3289 otherwise return 0 and do not emit any code. */
3291 static int
3292 tail_recursion_args (actuals, formals)
3293 tree actuals, formals;
3295 tree a = actuals, f = formals;
3296 int i;
3297 rtx *argvec;
3299 /* Check that number and types of actuals are compatible
3300 with the formals. This is not always true in valid C code.
3301 Also check that no formal needs to be addressable
3302 and that all formals are scalars. */
3304 /* Also count the args. */
3306 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3308 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3309 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3310 return 0;
3311 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3312 return 0;
3314 if (a != 0 || f != 0)
3315 return 0;
3317 /* Compute all the actuals. */
3319 argvec = (rtx *) alloca (i * sizeof (rtx));
3321 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3322 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3324 /* Find which actual values refer to current values of previous formals.
3325 Copy each of them now, before any formal is changed. */
3327 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3329 int copy = 0;
3330 int j;
3331 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3332 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3334 copy = 1;
3335 break;
3337 if (copy)
3338 argvec[i] = copy_to_reg (argvec[i]);
3341 /* Store the values of the actuals into the formals. */
3343 for (f = formals, a = actuals, i = 0; f;
3344 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3346 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3347 emit_move_insn (DECL_RTL (f), argvec[i]);
3348 else
3350 rtx tmp = argvec[i];
3351 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a)));
3352 promote_mode(TREE_TYPE (TREE_VALUE (a)), GET_MODE (tmp),
3353 &unsignedp, 0);
3354 if (DECL_MODE (f) != GET_MODE (DECL_RTL (f)))
3356 tmp = gen_reg_rtx (DECL_MODE (f));
3357 convert_move (tmp, argvec[i], unsignedp);
3359 convert_move (DECL_RTL (f), tmp, unsignedp);
3363 free_temp_slots ();
3364 return 1;
3367 /* Generate the RTL code for entering a binding contour.
3368 The variables are declared one by one, by calls to `expand_decl'.
3370 FLAGS is a bitwise or of the following flags:
3372 1 - Nonzero if this construct should be visible to
3373 `exit_something'.
3375 2 - Nonzero if this contour does not require a
3376 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3377 language-independent code should set this flag because they
3378 will not create corresponding BLOCK nodes. (There should be
3379 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3380 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3381 when expand_end_bindings is called.
3383 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3384 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3385 note. */
3387 void
3388 expand_start_bindings_and_block (flags, block)
3389 int flags;
3390 tree block;
3392 struct nesting *thisblock = ALLOC_NESTING ();
3393 rtx note;
3394 int exit_flag = ((flags & 1) != 0);
3395 int block_flag = ((flags & 2) == 0);
3397 /* If a BLOCK is supplied, then the caller should be requesting a
3398 NOTE_INSN_BLOCK_BEG note. */
3399 if (!block_flag && block)
3400 abort ();
3402 /* Create a note to mark the beginning of the block. */
3403 if (block_flag)
3405 note = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
3406 NOTE_BLOCK (note) = block;
3408 else
3409 note = emit_note (NULL, NOTE_INSN_DELETED);
3411 /* Make an entry on block_stack for the block we are entering. */
3413 thisblock->desc = BLOCK_NESTING;
3414 thisblock->next = block_stack;
3415 thisblock->all = nesting_stack;
3416 thisblock->depth = ++nesting_depth;
3417 thisblock->data.block.stack_level = 0;
3418 thisblock->data.block.cleanups = 0;
3419 thisblock->data.block.exception_region = 0;
3420 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3422 thisblock->data.block.conditional_code = 0;
3423 thisblock->data.block.last_unconditional_cleanup = note;
3424 /* When we insert instructions after the last unconditional cleanup,
3425 we don't adjust last_insn. That means that a later add_insn will
3426 clobber the instructions we've just added. The easiest way to
3427 fix this is to just insert another instruction here, so that the
3428 instructions inserted after the last unconditional cleanup are
3429 never the last instruction. */
3430 emit_note (NULL, NOTE_INSN_DELETED);
3432 if (block_stack
3433 && !(block_stack->data.block.cleanups == NULL_TREE
3434 && block_stack->data.block.outer_cleanups == NULL_TREE))
3435 thisblock->data.block.outer_cleanups
3436 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3437 block_stack->data.block.outer_cleanups);
3438 else
3439 thisblock->data.block.outer_cleanups = 0;
3440 thisblock->data.block.label_chain = 0;
3441 thisblock->data.block.innermost_stack_block = stack_block_stack;
3442 thisblock->data.block.first_insn = note;
3443 thisblock->data.block.block_start_count = ++current_block_start_count;
3444 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3445 block_stack = thisblock;
3446 nesting_stack = thisblock;
3448 /* Make a new level for allocating stack slots. */
3449 push_temp_slots ();
3452 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3453 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3454 expand_expr are made. After we end the region, we know that all
3455 space for all temporaries that were created by TARGET_EXPRs will be
3456 destroyed and their space freed for reuse. */
3458 void
3459 expand_start_target_temps ()
3461 /* This is so that even if the result is preserved, the space
3462 allocated will be freed, as we know that it is no longer in use. */
3463 push_temp_slots ();
3465 /* Start a new binding layer that will keep track of all cleanup
3466 actions to be performed. */
3467 expand_start_bindings (2);
3469 target_temp_slot_level = temp_slot_level;
3472 void
3473 expand_end_target_temps ()
3475 expand_end_bindings (NULL_TREE, 0, 0);
3477 /* This is so that even if the result is preserved, the space
3478 allocated will be freed, as we know that it is no longer in use. */
3479 pop_temp_slots ();
3482 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
3483 in question represents the outermost pair of curly braces (i.e. the "body
3484 block") of a function or method.
3486 For any BLOCK node representing a "body block" of a function or method, the
3487 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3488 represents the outermost (function) scope for the function or method (i.e.
3489 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3490 *that* node in turn will point to the relevant FUNCTION_DECL node. */
3493 is_body_block (stmt)
3494 tree stmt;
3496 if (TREE_CODE (stmt) == BLOCK)
3498 tree parent = BLOCK_SUPERCONTEXT (stmt);
3500 if (parent && TREE_CODE (parent) == BLOCK)
3502 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3504 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3505 return 1;
3509 return 0;
3512 /* True if we are currently emitting insns in an area of output code
3513 that is controlled by a conditional expression. This is used by
3514 the cleanup handling code to generate conditional cleanup actions. */
3517 conditional_context ()
3519 return block_stack && block_stack->data.block.conditional_code;
3522 /* Return an opaque pointer to the current nesting level, so frontend code
3523 can check its own sanity. */
3525 struct nesting *
3526 current_nesting_level ()
3528 return cfun ? block_stack : 0;
3531 /* Emit a handler label for a nonlocal goto handler.
3532 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3534 static rtx
3535 expand_nl_handler_label (slot, before_insn)
3536 rtx slot, before_insn;
3538 rtx insns;
3539 rtx handler_label = gen_label_rtx ();
3541 /* Don't let cleanup_cfg delete the handler. */
3542 LABEL_PRESERVE_P (handler_label) = 1;
3544 start_sequence ();
3545 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3546 insns = get_insns ();
3547 end_sequence ();
3548 emit_insn_before (insns, before_insn);
3550 emit_label (handler_label);
3552 return handler_label;
3555 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3556 handler. */
3557 static void
3558 expand_nl_goto_receiver ()
3560 #ifdef HAVE_nonlocal_goto
3561 if (! HAVE_nonlocal_goto)
3562 #endif
3563 /* First adjust our frame pointer to its actual value. It was
3564 previously set to the start of the virtual area corresponding to
3565 the stacked variables when we branched here and now needs to be
3566 adjusted to the actual hardware fp value.
3568 Assignments are to virtual registers are converted by
3569 instantiate_virtual_regs into the corresponding assignment
3570 to the underlying register (fp in this case) that makes
3571 the original assignment true.
3572 So the following insn will actually be
3573 decrementing fp by STARTING_FRAME_OFFSET. */
3574 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3576 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3577 if (fixed_regs[ARG_POINTER_REGNUM])
3579 #ifdef ELIMINABLE_REGS
3580 /* If the argument pointer can be eliminated in favor of the
3581 frame pointer, we don't need to restore it. We assume here
3582 that if such an elimination is present, it can always be used.
3583 This is the case on all known machines; if we don't make this
3584 assumption, we do unnecessary saving on many machines. */
3585 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3586 size_t i;
3588 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3589 if (elim_regs[i].from == ARG_POINTER_REGNUM
3590 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3591 break;
3593 if (i == ARRAY_SIZE (elim_regs))
3594 #endif
3596 /* Now restore our arg pointer from the address at which it
3597 was saved in our stack frame. */
3598 emit_move_insn (virtual_incoming_args_rtx,
3599 copy_to_reg (get_arg_pointer_save_area (cfun)));
3602 #endif
3604 #ifdef HAVE_nonlocal_goto_receiver
3605 if (HAVE_nonlocal_goto_receiver)
3606 emit_insn (gen_nonlocal_goto_receiver ());
3607 #endif
3610 /* Make handlers for nonlocal gotos taking place in the function calls in
3611 block THISBLOCK. */
3613 static void
3614 expand_nl_goto_receivers (thisblock)
3615 struct nesting *thisblock;
3617 tree link;
3618 rtx afterward = gen_label_rtx ();
3619 rtx insns, slot;
3620 rtx label_list;
3621 int any_invalid;
3623 /* Record the handler address in the stack slot for that purpose,
3624 during this block, saving and restoring the outer value. */
3625 if (thisblock->next != 0)
3626 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3628 rtx save_receiver = gen_reg_rtx (Pmode);
3629 emit_move_insn (XEXP (slot, 0), save_receiver);
3631 start_sequence ();
3632 emit_move_insn (save_receiver, XEXP (slot, 0));
3633 insns = get_insns ();
3634 end_sequence ();
3635 emit_insn_before (insns, thisblock->data.block.first_insn);
3638 /* Jump around the handlers; they run only when specially invoked. */
3639 emit_jump (afterward);
3641 /* Make a separate handler for each label. */
3642 link = nonlocal_labels;
3643 slot = nonlocal_goto_handler_slots;
3644 label_list = NULL_RTX;
3645 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3646 /* Skip any labels we shouldn't be able to jump to from here,
3647 we generate one special handler for all of them below which just calls
3648 abort. */
3649 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3651 rtx lab;
3652 lab = expand_nl_handler_label (XEXP (slot, 0),
3653 thisblock->data.block.first_insn);
3654 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3656 expand_nl_goto_receiver ();
3658 /* Jump to the "real" nonlocal label. */
3659 expand_goto (TREE_VALUE (link));
3662 /* A second pass over all nonlocal labels; this time we handle those
3663 we should not be able to jump to at this point. */
3664 link = nonlocal_labels;
3665 slot = nonlocal_goto_handler_slots;
3666 any_invalid = 0;
3667 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3668 if (DECL_TOO_LATE (TREE_VALUE (link)))
3670 rtx lab;
3671 lab = expand_nl_handler_label (XEXP (slot, 0),
3672 thisblock->data.block.first_insn);
3673 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3674 any_invalid = 1;
3677 if (any_invalid)
3679 expand_nl_goto_receiver ();
3680 expand_builtin_trap ();
3683 nonlocal_goto_handler_labels = label_list;
3684 emit_label (afterward);
3687 /* Warn about any unused VARS (which may contain nodes other than
3688 VAR_DECLs, but such nodes are ignored). The nodes are connected
3689 via the TREE_CHAIN field. */
3691 void
3692 warn_about_unused_variables (vars)
3693 tree vars;
3695 tree decl;
3697 if (warn_unused_variable)
3698 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3699 if (TREE_CODE (decl) == VAR_DECL
3700 && ! TREE_USED (decl)
3701 && ! DECL_IN_SYSTEM_HEADER (decl)
3702 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3703 warning_with_decl (decl, "unused variable `%s'");
3706 /* Generate RTL code to terminate a binding contour.
3708 VARS is the chain of VAR_DECL nodes for the variables bound in this
3709 contour. There may actually be other nodes in this chain, but any
3710 nodes other than VAR_DECLS are ignored.
3712 MARK_ENDS is nonzero if we should put a note at the beginning
3713 and end of this binding contour.
3715 DONT_JUMP_IN is positive if it is not valid to jump into this contour,
3716 zero if we can jump into this contour only if it does not have a saved
3717 stack level, and negative if we are not to check for invalid use of
3718 labels (because the front end does that). */
3720 void
3721 expand_end_bindings (vars, mark_ends, dont_jump_in)
3722 tree vars;
3723 int mark_ends;
3724 int dont_jump_in;
3726 struct nesting *thisblock = block_stack;
3728 /* If any of the variables in this scope were not used, warn the
3729 user. */
3730 warn_about_unused_variables (vars);
3732 if (thisblock->exit_label)
3734 do_pending_stack_adjust ();
3735 emit_label (thisblock->exit_label);
3738 /* If necessary, make handlers for nonlocal gotos taking
3739 place in the function calls in this block. */
3740 if (function_call_count != 0 && nonlocal_labels
3741 /* Make handler for outermost block
3742 if there were any nonlocal gotos to this function. */
3743 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3744 /* Make handler for inner block if it has something
3745 special to do when you jump out of it. */
3746 : (thisblock->data.block.cleanups != 0
3747 || thisblock->data.block.stack_level != 0)))
3748 expand_nl_goto_receivers (thisblock);
3750 /* Don't allow jumping into a block that has a stack level.
3751 Cleanups are allowed, though. */
3752 if (dont_jump_in > 0
3753 || (dont_jump_in == 0 && thisblock->data.block.stack_level != 0))
3755 struct label_chain *chain;
3757 /* Any labels in this block are no longer valid to go to.
3758 Mark them to cause an error message. */
3759 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3761 DECL_TOO_LATE (chain->label) = 1;
3762 /* If any goto without a fixup came to this label,
3763 that must be an error, because gotos without fixups
3764 come from outside all saved stack-levels. */
3765 if (TREE_ADDRESSABLE (chain->label))
3766 error_with_decl (chain->label,
3767 "label `%s' used before containing binding contour");
3771 /* Restore stack level in effect before the block
3772 (only if variable-size objects allocated). */
3773 /* Perform any cleanups associated with the block. */
3775 if (thisblock->data.block.stack_level != 0
3776 || thisblock->data.block.cleanups != 0)
3778 int reachable;
3779 rtx insn;
3781 /* Don't let cleanups affect ({...}) constructs. */
3782 int old_expr_stmts_for_value = expr_stmts_for_value;
3783 rtx old_last_expr_value = last_expr_value;
3784 tree old_last_expr_type = last_expr_type;
3785 expr_stmts_for_value = 0;
3787 /* Only clean up here if this point can actually be reached. */
3788 insn = get_last_insn ();
3789 if (GET_CODE (insn) == NOTE)
3790 insn = prev_nonnote_insn (insn);
3791 reachable = (! insn || GET_CODE (insn) != BARRIER);
3793 /* Do the cleanups. */
3794 expand_cleanups (thisblock->data.block.cleanups, 0, reachable);
3795 if (reachable)
3796 do_pending_stack_adjust ();
3798 expr_stmts_for_value = old_expr_stmts_for_value;
3799 last_expr_value = old_last_expr_value;
3800 last_expr_type = old_last_expr_type;
3802 /* Restore the stack level. */
3804 if (reachable && thisblock->data.block.stack_level != 0)
3806 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3807 thisblock->data.block.stack_level, NULL_RTX);
3808 if (nonlocal_goto_handler_slots != 0)
3809 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3810 NULL_RTX);
3813 /* Any gotos out of this block must also do these things.
3814 Also report any gotos with fixups that came to labels in this
3815 level. */
3816 fixup_gotos (thisblock,
3817 thisblock->data.block.stack_level,
3818 thisblock->data.block.cleanups,
3819 thisblock->data.block.first_insn,
3820 dont_jump_in);
3823 /* Mark the beginning and end of the scope if requested.
3824 We do this now, after running cleanups on the variables
3825 just going out of scope, so they are in scope for their cleanups. */
3827 if (mark_ends)
3829 rtx note = emit_note (NULL, NOTE_INSN_BLOCK_END);
3830 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3832 else
3833 /* Get rid of the beginning-mark if we don't make an end-mark. */
3834 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3836 /* Restore the temporary level of TARGET_EXPRs. */
3837 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3839 /* Restore block_stack level for containing block. */
3841 stack_block_stack = thisblock->data.block.innermost_stack_block;
3842 POPSTACK (block_stack);
3844 /* Pop the stack slot nesting and free any slots at this level. */
3845 pop_temp_slots ();
3848 /* Generate code to save the stack pointer at the start of the current block
3849 and set up to restore it on exit. */
3851 void
3852 save_stack_pointer ()
3854 struct nesting *thisblock = block_stack;
3856 if (thisblock->data.block.stack_level == 0)
3858 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3859 &thisblock->data.block.stack_level,
3860 thisblock->data.block.first_insn);
3861 stack_block_stack = thisblock;
3865 /* Generate RTL for the automatic variable declaration DECL.
3866 (Other kinds of declarations are simply ignored if seen here.) */
3868 void
3869 expand_decl (decl)
3870 tree decl;
3872 tree type;
3874 type = TREE_TYPE (decl);
3876 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3877 type in case this node is used in a reference. */
3878 if (TREE_CODE (decl) == CONST_DECL)
3880 DECL_MODE (decl) = TYPE_MODE (type);
3881 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3882 DECL_SIZE (decl) = TYPE_SIZE (type);
3883 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3884 return;
3887 /* Otherwise, only automatic variables need any expansion done. Static and
3888 external variables, and external functions, will be handled by
3889 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3890 nothing. PARM_DECLs are handled in `assign_parms'. */
3891 if (TREE_CODE (decl) != VAR_DECL)
3892 return;
3894 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3895 return;
3897 /* Create the RTL representation for the variable. */
3899 if (type == error_mark_node)
3900 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3902 else if (DECL_SIZE (decl) == 0)
3903 /* Variable with incomplete type. */
3905 rtx x;
3906 if (DECL_INITIAL (decl) == 0)
3907 /* Error message was already done; now avoid a crash. */
3908 x = gen_rtx_MEM (BLKmode, const0_rtx);
3909 else
3910 /* An initializer is going to decide the size of this array.
3911 Until we know the size, represent its address with a reg. */
3912 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3914 set_mem_attributes (x, decl, 1);
3915 SET_DECL_RTL (decl, x);
3917 else if (DECL_MODE (decl) != BLKmode
3918 /* If -ffloat-store, don't put explicit float vars
3919 into regs. */
3920 && !(flag_float_store
3921 && TREE_CODE (type) == REAL_TYPE)
3922 && ! TREE_THIS_VOLATILE (decl)
3923 && ! DECL_NONLOCAL (decl)
3924 && (DECL_REGISTER (decl) || optimize))
3926 /* Automatic variable that can go in a register. */
3927 int unsignedp = TREE_UNSIGNED (type);
3928 enum machine_mode reg_mode
3929 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3931 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3933 mark_user_reg (DECL_RTL (decl));
3935 if (POINTER_TYPE_P (type))
3936 mark_reg_pointer (DECL_RTL (decl),
3937 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3939 maybe_set_unchanging (DECL_RTL (decl), decl);
3941 /* If something wants our address, try to use ADDRESSOF. */
3942 if (TREE_ADDRESSABLE (decl))
3943 put_var_into_stack (decl, /*rescan=*/false);
3946 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3947 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3948 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3949 STACK_CHECK_MAX_VAR_SIZE)))
3951 /* Variable of fixed size that goes on the stack. */
3952 rtx oldaddr = 0;
3953 rtx addr;
3954 rtx x;
3956 /* If we previously made RTL for this decl, it must be an array
3957 whose size was determined by the initializer.
3958 The old address was a register; set that register now
3959 to the proper address. */
3960 if (DECL_RTL_SET_P (decl))
3962 if (GET_CODE (DECL_RTL (decl)) != MEM
3963 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3964 abort ();
3965 oldaddr = XEXP (DECL_RTL (decl), 0);
3968 /* Set alignment we actually gave this decl. */
3969 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3970 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3971 DECL_USER_ALIGN (decl) = 0;
3973 x = assign_temp (decl, 1, 1, 1);
3974 set_mem_attributes (x, decl, 1);
3975 SET_DECL_RTL (decl, x);
3977 if (oldaddr)
3979 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3980 if (addr != oldaddr)
3981 emit_move_insn (oldaddr, addr);
3984 else
3985 /* Dynamic-size object: must push space on the stack. */
3987 rtx address, size, x;
3989 /* Record the stack pointer on entry to block, if have
3990 not already done so. */
3991 do_pending_stack_adjust ();
3992 save_stack_pointer ();
3994 /* In function-at-a-time mode, variable_size doesn't expand this,
3995 so do it now. */
3996 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3997 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3998 const0_rtx, VOIDmode, 0);
4000 /* Compute the variable's size, in bytes. */
4001 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
4002 free_temp_slots ();
4004 /* Allocate space on the stack for the variable. Note that
4005 DECL_ALIGN says how the variable is to be aligned and we
4006 cannot use it to conclude anything about the alignment of
4007 the size. */
4008 address = allocate_dynamic_stack_space (size, NULL_RTX,
4009 TYPE_ALIGN (TREE_TYPE (decl)));
4011 /* Reference the variable indirect through that rtx. */
4012 x = gen_rtx_MEM (DECL_MODE (decl), address);
4013 set_mem_attributes (x, decl, 1);
4014 SET_DECL_RTL (decl, x);
4017 /* Indicate the alignment we actually gave this variable. */
4018 #ifdef STACK_BOUNDARY
4019 DECL_ALIGN (decl) = STACK_BOUNDARY;
4020 #else
4021 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
4022 #endif
4023 DECL_USER_ALIGN (decl) = 0;
4027 /* Emit code to perform the initialization of a declaration DECL. */
4029 void
4030 expand_decl_init (decl)
4031 tree decl;
4033 int was_used = TREE_USED (decl);
4035 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
4036 for static decls. */
4037 if (TREE_CODE (decl) == CONST_DECL
4038 || TREE_STATIC (decl))
4039 return;
4041 /* Compute and store the initial value now. */
4043 push_temp_slots ();
4045 if (DECL_INITIAL (decl) == error_mark_node)
4047 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
4049 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
4050 || code == POINTER_TYPE || code == REFERENCE_TYPE)
4051 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
4052 0, 0);
4053 emit_queue ();
4055 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
4057 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
4058 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
4059 emit_queue ();
4062 /* Don't let the initialization count as "using" the variable. */
4063 TREE_USED (decl) = was_used;
4065 /* Free any temporaries we made while initializing the decl. */
4066 preserve_temp_slots (NULL_RTX);
4067 free_temp_slots ();
4068 pop_temp_slots ();
4071 /* CLEANUP is an expression to be executed at exit from this binding contour;
4072 for example, in C++, it might call the destructor for this variable.
4074 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4075 CLEANUP multiple times, and have the correct semantics. This
4076 happens in exception handling, for gotos, returns, breaks that
4077 leave the current scope.
4079 If CLEANUP is nonzero and DECL is zero, we record a cleanup
4080 that is not associated with any particular variable. */
4083 expand_decl_cleanup (decl, cleanup)
4084 tree decl, cleanup;
4086 struct nesting *thisblock;
4088 /* Error if we are not in any block. */
4089 if (cfun == 0 || block_stack == 0)
4090 return 0;
4092 thisblock = block_stack;
4094 /* Record the cleanup if there is one. */
4096 if (cleanup != 0)
4098 tree t;
4099 rtx seq;
4100 tree *cleanups = &thisblock->data.block.cleanups;
4101 int cond_context = conditional_context ();
4103 if (cond_context)
4105 rtx flag = gen_reg_rtx (word_mode);
4106 rtx set_flag_0;
4107 tree cond;
4109 start_sequence ();
4110 emit_move_insn (flag, const0_rtx);
4111 set_flag_0 = get_insns ();
4112 end_sequence ();
4114 thisblock->data.block.last_unconditional_cleanup
4115 = emit_insn_after (set_flag_0,
4116 thisblock->data.block.last_unconditional_cleanup);
4118 emit_move_insn (flag, const1_rtx);
4120 cond = build_decl (VAR_DECL, NULL_TREE,
4121 (*lang_hooks.types.type_for_mode) (word_mode, 1));
4122 SET_DECL_RTL (cond, flag);
4124 /* Conditionalize the cleanup. */
4125 cleanup = build (COND_EXPR, void_type_node,
4126 (*lang_hooks.truthvalue_conversion) (cond),
4127 cleanup, integer_zero_node);
4128 cleanup = fold (cleanup);
4130 cleanups = &thisblock->data.block.cleanups;
4133 cleanup = unsave_expr (cleanup);
4135 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4137 if (! cond_context)
4138 /* If this block has a cleanup, it belongs in stack_block_stack. */
4139 stack_block_stack = thisblock;
4141 if (cond_context)
4143 start_sequence ();
4146 if (! using_eh_for_cleanups_p)
4147 TREE_ADDRESSABLE (t) = 1;
4148 else
4149 expand_eh_region_start ();
4151 if (cond_context)
4153 seq = get_insns ();
4154 end_sequence ();
4155 if (seq)
4156 thisblock->data.block.last_unconditional_cleanup
4157 = emit_insn_after (seq,
4158 thisblock->data.block.last_unconditional_cleanup);
4160 else
4162 thisblock->data.block.last_unconditional_cleanup
4163 = get_last_insn ();
4164 /* When we insert instructions after the last unconditional cleanup,
4165 we don't adjust last_insn. That means that a later add_insn will
4166 clobber the instructions we've just added. The easiest way to
4167 fix this is to just insert another instruction here, so that the
4168 instructions inserted after the last unconditional cleanup are
4169 never the last instruction. */
4170 emit_note (NULL, NOTE_INSN_DELETED);
4173 return 1;
4176 /* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
4177 is thrown. */
4180 expand_decl_cleanup_eh (decl, cleanup, eh_only)
4181 tree decl, cleanup;
4182 int eh_only;
4184 int ret = expand_decl_cleanup (decl, cleanup);
4185 if (cleanup && ret)
4187 tree node = block_stack->data.block.cleanups;
4188 CLEANUP_EH_ONLY (node) = eh_only;
4190 return ret;
4193 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4194 DECL_ELTS is the list of elements that belong to DECL's type.
4195 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4197 void
4198 expand_anon_union_decl (decl, cleanup, decl_elts)
4199 tree decl, cleanup, decl_elts;
4201 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4202 rtx x;
4203 tree t;
4205 /* If any of the elements are addressable, so is the entire union. */
4206 for (t = decl_elts; t; t = TREE_CHAIN (t))
4207 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4209 TREE_ADDRESSABLE (decl) = 1;
4210 break;
4213 expand_decl (decl);
4214 expand_decl_cleanup (decl, cleanup);
4215 x = DECL_RTL (decl);
4217 /* Go through the elements, assigning RTL to each. */
4218 for (t = decl_elts; t; t = TREE_CHAIN (t))
4220 tree decl_elt = TREE_VALUE (t);
4221 tree cleanup_elt = TREE_PURPOSE (t);
4222 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4224 /* If any of the elements are addressable, so is the entire
4225 union. */
4226 if (TREE_USED (decl_elt))
4227 TREE_USED (decl) = 1;
4229 /* Propagate the union's alignment to the elements. */
4230 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4231 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4233 /* If the element has BLKmode and the union doesn't, the union is
4234 aligned such that the element doesn't need to have BLKmode, so
4235 change the element's mode to the appropriate one for its size. */
4236 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4237 DECL_MODE (decl_elt) = mode
4238 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4240 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4241 instead create a new MEM rtx with the proper mode. */
4242 if (GET_CODE (x) == MEM)
4244 if (mode == GET_MODE (x))
4245 SET_DECL_RTL (decl_elt, x);
4246 else
4247 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
4249 else if (GET_CODE (x) == REG)
4251 if (mode == GET_MODE (x))
4252 SET_DECL_RTL (decl_elt, x);
4253 else
4254 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
4256 else
4257 abort ();
4259 /* Record the cleanup if there is one. */
4261 if (cleanup != 0)
4262 thisblock->data.block.cleanups
4263 = tree_cons (decl_elt, cleanup_elt,
4264 thisblock->data.block.cleanups);
4268 /* Expand a list of cleanups LIST.
4269 Elements may be expressions or may be nested lists.
4271 If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
4272 goto and handle protection regions specially in that case.
4274 If REACHABLE, we emit code, otherwise just inform the exception handling
4275 code about this finalization. */
4277 static void
4278 expand_cleanups (list, in_fixup, reachable)
4279 tree list;
4280 int in_fixup;
4281 int reachable;
4283 tree tail;
4284 for (tail = list; tail; tail = TREE_CHAIN (tail))
4285 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4286 expand_cleanups (TREE_VALUE (tail), in_fixup, reachable);
4287 else
4289 if (! in_fixup && using_eh_for_cleanups_p)
4290 expand_eh_region_end_cleanup (TREE_VALUE (tail));
4292 if (reachable && !CLEANUP_EH_ONLY (tail))
4294 /* Cleanups may be run multiple times. For example,
4295 when exiting a binding contour, we expand the
4296 cleanups associated with that contour. When a goto
4297 within that binding contour has a target outside that
4298 contour, it will expand all cleanups from its scope to
4299 the target. Though the cleanups are expanded multiple
4300 times, the control paths are non-overlapping so the
4301 cleanups will not be executed twice. */
4303 /* We may need to protect from outer cleanups. */
4304 if (in_fixup && using_eh_for_cleanups_p)
4306 expand_eh_region_start ();
4308 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4310 expand_eh_region_end_fixup (TREE_VALUE (tail));
4312 else
4313 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4315 free_temp_slots ();
4320 /* Mark when the context we are emitting RTL for as a conditional
4321 context, so that any cleanup actions we register with
4322 expand_decl_init will be properly conditionalized when those
4323 cleanup actions are later performed. Must be called before any
4324 expression (tree) is expanded that is within a conditional context. */
4326 void
4327 start_cleanup_deferral ()
4329 /* block_stack can be NULL if we are inside the parameter list. It is
4330 OK to do nothing, because cleanups aren't possible here. */
4331 if (block_stack)
4332 ++block_stack->data.block.conditional_code;
4335 /* Mark the end of a conditional region of code. Because cleanup
4336 deferrals may be nested, we may still be in a conditional region
4337 after we end the currently deferred cleanups, only after we end all
4338 deferred cleanups, are we back in unconditional code. */
4340 void
4341 end_cleanup_deferral ()
4343 /* block_stack can be NULL if we are inside the parameter list. It is
4344 OK to do nothing, because cleanups aren't possible here. */
4345 if (block_stack)
4346 --block_stack->data.block.conditional_code;
4349 tree
4350 last_cleanup_this_contour ()
4352 if (block_stack == 0)
4353 return 0;
4355 return block_stack->data.block.cleanups;
4358 /* Return 1 if there are any pending cleanups at this point.
4359 If THIS_CONTOUR is nonzero, check the current contour as well.
4360 Otherwise, look only at the contours that enclose this one. */
4363 any_pending_cleanups (this_contour)
4364 int this_contour;
4366 struct nesting *block;
4368 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4369 return 0;
4371 if (this_contour && block_stack->data.block.cleanups != NULL)
4372 return 1;
4373 if (block_stack->data.block.cleanups == 0
4374 && block_stack->data.block.outer_cleanups == 0)
4375 return 0;
4377 for (block = block_stack->next; block; block = block->next)
4378 if (block->data.block.cleanups != 0)
4379 return 1;
4381 return 0;
4384 /* Enter a case (Pascal) or switch (C) statement.
4385 Push a block onto case_stack and nesting_stack
4386 to accumulate the case-labels that are seen
4387 and to record the labels generated for the statement.
4389 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4390 Otherwise, this construct is transparent for `exit_something'.
4392 EXPR is the index-expression to be dispatched on.
4393 TYPE is its nominal type. We could simply convert EXPR to this type,
4394 but instead we take short cuts. */
4396 void
4397 expand_start_case (exit_flag, expr, type, printname)
4398 int exit_flag;
4399 tree expr;
4400 tree type;
4401 const char *printname;
4403 struct nesting *thiscase = ALLOC_NESTING ();
4405 /* Make an entry on case_stack for the case we are entering. */
4407 thiscase->desc = CASE_NESTING;
4408 thiscase->next = case_stack;
4409 thiscase->all = nesting_stack;
4410 thiscase->depth = ++nesting_depth;
4411 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4412 thiscase->data.case_stmt.case_list = 0;
4413 thiscase->data.case_stmt.index_expr = expr;
4414 thiscase->data.case_stmt.nominal_type = type;
4415 thiscase->data.case_stmt.default_label = 0;
4416 thiscase->data.case_stmt.printname = printname;
4417 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4418 case_stack = thiscase;
4419 nesting_stack = thiscase;
4421 do_pending_stack_adjust ();
4422 emit_queue ();
4424 /* Make sure case_stmt.start points to something that won't
4425 need any transformation before expand_end_case. */
4426 if (GET_CODE (get_last_insn ()) != NOTE)
4427 emit_note (NULL, NOTE_INSN_DELETED);
4429 thiscase->data.case_stmt.start = get_last_insn ();
4431 start_cleanup_deferral ();
4434 /* Start a "dummy case statement" within which case labels are invalid
4435 and are not connected to any larger real case statement.
4436 This can be used if you don't want to let a case statement jump
4437 into the middle of certain kinds of constructs. */
4439 void
4440 expand_start_case_dummy ()
4442 struct nesting *thiscase = ALLOC_NESTING ();
4444 /* Make an entry on case_stack for the dummy. */
4446 thiscase->desc = CASE_NESTING;
4447 thiscase->next = case_stack;
4448 thiscase->all = nesting_stack;
4449 thiscase->depth = ++nesting_depth;
4450 thiscase->exit_label = 0;
4451 thiscase->data.case_stmt.case_list = 0;
4452 thiscase->data.case_stmt.start = 0;
4453 thiscase->data.case_stmt.nominal_type = 0;
4454 thiscase->data.case_stmt.default_label = 0;
4455 case_stack = thiscase;
4456 nesting_stack = thiscase;
4457 start_cleanup_deferral ();
4460 static void
4461 check_seenlabel ()
4463 /* If this is the first label, warn if any insns have been emitted. */
4464 if (case_stack->data.case_stmt.line_number_status >= 0)
4466 rtx insn;
4468 restore_line_number_status
4469 (case_stack->data.case_stmt.line_number_status);
4470 case_stack->data.case_stmt.line_number_status = -1;
4472 for (insn = case_stack->data.case_stmt.start;
4473 insn;
4474 insn = NEXT_INSN (insn))
4476 if (GET_CODE (insn) == CODE_LABEL)
4477 break;
4478 if (GET_CODE (insn) != NOTE
4479 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4482 insn = PREV_INSN (insn);
4483 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4485 /* If insn is zero, then there must have been a syntax error. */
4486 if (insn)
4487 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4488 NOTE_LINE_NUMBER (insn),
4489 "unreachable code at beginning of %s",
4490 case_stack->data.case_stmt.printname);
4491 break;
4497 /* Accumulate one case or default label inside a case or switch statement.
4498 VALUE is the value of the case (a null pointer, for a default label).
4499 The function CONVERTER, when applied to arguments T and V,
4500 converts the value V to the type T.
4502 If not currently inside a case or switch statement, return 1 and do
4503 nothing. The caller will print a language-specific error message.
4504 If VALUE is a duplicate or overlaps, return 2 and do nothing
4505 except store the (first) duplicate node in *DUPLICATE.
4506 If VALUE is out of range, return 3 and do nothing.
4507 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4508 Return 0 on success.
4510 Extended to handle range statements. */
4513 pushcase (value, converter, label, duplicate)
4514 tree value;
4515 tree (*converter) PARAMS ((tree, tree));
4516 tree label;
4517 tree *duplicate;
4519 tree index_type;
4520 tree nominal_type;
4522 /* Fail if not inside a real case statement. */
4523 if (! (case_stack && case_stack->data.case_stmt.start))
4524 return 1;
4526 if (stack_block_stack
4527 && stack_block_stack->depth > case_stack->depth)
4528 return 5;
4530 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4531 nominal_type = case_stack->data.case_stmt.nominal_type;
4533 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4534 if (index_type == error_mark_node)
4535 return 0;
4537 /* Convert VALUE to the type in which the comparisons are nominally done. */
4538 if (value != 0)
4539 value = (*converter) (nominal_type, value);
4541 check_seenlabel ();
4543 /* Fail if this value is out of range for the actual type of the index
4544 (which may be narrower than NOMINAL_TYPE). */
4545 if (value != 0
4546 && (TREE_CONSTANT_OVERFLOW (value)
4547 || ! int_fits_type_p (value, index_type)))
4548 return 3;
4550 return add_case_node (value, value, label, duplicate);
4553 /* Like pushcase but this case applies to all values between VALUE1 and
4554 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4555 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4556 starts at VALUE1 and ends at the highest value of the index type.
4557 If both are NULL, this case applies to all values.
4559 The return value is the same as that of pushcase but there is one
4560 additional error code: 4 means the specified range was empty. */
4563 pushcase_range (value1, value2, converter, label, duplicate)
4564 tree value1, value2;
4565 tree (*converter) PARAMS ((tree, tree));
4566 tree label;
4567 tree *duplicate;
4569 tree index_type;
4570 tree nominal_type;
4572 /* Fail if not inside a real case statement. */
4573 if (! (case_stack && case_stack->data.case_stmt.start))
4574 return 1;
4576 if (stack_block_stack
4577 && stack_block_stack->depth > case_stack->depth)
4578 return 5;
4580 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4581 nominal_type = case_stack->data.case_stmt.nominal_type;
4583 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4584 if (index_type == error_mark_node)
4585 return 0;
4587 check_seenlabel ();
4589 /* Convert VALUEs to type in which the comparisons are nominally done
4590 and replace any unspecified value with the corresponding bound. */
4591 if (value1 == 0)
4592 value1 = TYPE_MIN_VALUE (index_type);
4593 if (value2 == 0)
4594 value2 = TYPE_MAX_VALUE (index_type);
4596 /* Fail if the range is empty. Do this before any conversion since
4597 we want to allow out-of-range empty ranges. */
4598 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4599 return 4;
4601 /* If the max was unbounded, use the max of the nominal_type we are
4602 converting to. Do this after the < check above to suppress false
4603 positives. */
4604 if (value2 == 0)
4605 value2 = TYPE_MAX_VALUE (nominal_type);
4607 value1 = (*converter) (nominal_type, value1);
4608 value2 = (*converter) (nominal_type, value2);
4610 /* Fail if these values are out of range. */
4611 if (TREE_CONSTANT_OVERFLOW (value1)
4612 || ! int_fits_type_p (value1, index_type))
4613 return 3;
4615 if (TREE_CONSTANT_OVERFLOW (value2)
4616 || ! int_fits_type_p (value2, index_type))
4617 return 3;
4619 return add_case_node (value1, value2, label, duplicate);
4622 /* Do the actual insertion of a case label for pushcase and pushcase_range
4623 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4624 slowdown for large switch statements. */
4627 add_case_node (low, high, label, duplicate)
4628 tree low, high;
4629 tree label;
4630 tree *duplicate;
4632 struct case_node *p, **q, *r;
4634 /* If there's no HIGH value, then this is not a case range; it's
4635 just a simple case label. But that's just a degenerate case
4636 range. */
4637 if (!high)
4638 high = low;
4640 /* Handle default labels specially. */
4641 if (!high && !low)
4643 if (case_stack->data.case_stmt.default_label != 0)
4645 *duplicate = case_stack->data.case_stmt.default_label;
4646 return 2;
4648 case_stack->data.case_stmt.default_label = label;
4649 expand_label (label);
4650 return 0;
4653 q = &case_stack->data.case_stmt.case_list;
4654 p = *q;
4656 while ((r = *q))
4658 p = r;
4660 /* Keep going past elements distinctly greater than HIGH. */
4661 if (tree_int_cst_lt (high, p->low))
4662 q = &p->left;
4664 /* or distinctly less than LOW. */
4665 else if (tree_int_cst_lt (p->high, low))
4666 q = &p->right;
4668 else
4670 /* We have an overlap; this is an error. */
4671 *duplicate = p->code_label;
4672 return 2;
4676 /* Add this label to the chain, and succeed. */
4678 r = (struct case_node *) ggc_alloc (sizeof (struct case_node));
4679 r->low = low;
4681 /* If the bounds are equal, turn this into the one-value case. */
4682 if (tree_int_cst_equal (low, high))
4683 r->high = r->low;
4684 else
4685 r->high = high;
4687 r->code_label = label;
4688 expand_label (label);
4690 *q = r;
4691 r->parent = p;
4692 r->left = 0;
4693 r->right = 0;
4694 r->balance = 0;
4696 while (p)
4698 struct case_node *s;
4700 if (r == p->left)
4702 int b;
4704 if (! (b = p->balance))
4705 /* Growth propagation from left side. */
4706 p->balance = -1;
4707 else if (b < 0)
4709 if (r->balance < 0)
4711 /* R-Rotation */
4712 if ((p->left = s = r->right))
4713 s->parent = p;
4715 r->right = p;
4716 p->balance = 0;
4717 r->balance = 0;
4718 s = p->parent;
4719 p->parent = r;
4721 if ((r->parent = s))
4723 if (s->left == p)
4724 s->left = r;
4725 else
4726 s->right = r;
4728 else
4729 case_stack->data.case_stmt.case_list = r;
4731 else
4732 /* r->balance == +1 */
4734 /* LR-Rotation */
4736 int b2;
4737 struct case_node *t = r->right;
4739 if ((p->left = s = t->right))
4740 s->parent = p;
4742 t->right = p;
4743 if ((r->right = s = t->left))
4744 s->parent = r;
4746 t->left = r;
4747 b = t->balance;
4748 b2 = b < 0;
4749 p->balance = b2;
4750 b2 = -b2 - b;
4751 r->balance = b2;
4752 t->balance = 0;
4753 s = p->parent;
4754 p->parent = t;
4755 r->parent = t;
4757 if ((t->parent = s))
4759 if (s->left == p)
4760 s->left = t;
4761 else
4762 s->right = t;
4764 else
4765 case_stack->data.case_stmt.case_list = t;
4767 break;
4770 else
4772 /* p->balance == +1; growth of left side balances the node. */
4773 p->balance = 0;
4774 break;
4777 else
4778 /* r == p->right */
4780 int b;
4782 if (! (b = p->balance))
4783 /* Growth propagation from right side. */
4784 p->balance++;
4785 else if (b > 0)
4787 if (r->balance > 0)
4789 /* L-Rotation */
4791 if ((p->right = s = r->left))
4792 s->parent = p;
4794 r->left = p;
4795 p->balance = 0;
4796 r->balance = 0;
4797 s = p->parent;
4798 p->parent = r;
4799 if ((r->parent = s))
4801 if (s->left == p)
4802 s->left = r;
4803 else
4804 s->right = r;
4807 else
4808 case_stack->data.case_stmt.case_list = r;
4811 else
4812 /* r->balance == -1 */
4814 /* RL-Rotation */
4815 int b2;
4816 struct case_node *t = r->left;
4818 if ((p->right = s = t->left))
4819 s->parent = p;
4821 t->left = p;
4823 if ((r->left = s = t->right))
4824 s->parent = r;
4826 t->right = r;
4827 b = t->balance;
4828 b2 = b < 0;
4829 r->balance = b2;
4830 b2 = -b2 - b;
4831 p->balance = b2;
4832 t->balance = 0;
4833 s = p->parent;
4834 p->parent = t;
4835 r->parent = t;
4837 if ((t->parent = s))
4839 if (s->left == p)
4840 s->left = t;
4841 else
4842 s->right = t;
4845 else
4846 case_stack->data.case_stmt.case_list = t;
4848 break;
4850 else
4852 /* p->balance == -1; growth of right side balances the node. */
4853 p->balance = 0;
4854 break;
4858 r = p;
4859 p = p->parent;
4862 return 0;
4865 /* Returns the number of possible values of TYPE.
4866 Returns -1 if the number is unknown, variable, or if the number does not
4867 fit in a HOST_WIDE_INT.
4868 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4869 do not increase monotonically (there may be duplicates);
4870 to 1 if the values increase monotonically, but not always by 1;
4871 otherwise sets it to 0. */
4873 HOST_WIDE_INT
4874 all_cases_count (type, sparseness)
4875 tree type;
4876 int *sparseness;
4878 tree t;
4879 HOST_WIDE_INT count, minval, lastval;
4881 *sparseness = 0;
4883 switch (TREE_CODE (type))
4885 case BOOLEAN_TYPE:
4886 count = 2;
4887 break;
4889 case CHAR_TYPE:
4890 count = 1 << BITS_PER_UNIT;
4891 break;
4893 default:
4894 case INTEGER_TYPE:
4895 if (TYPE_MAX_VALUE (type) != 0
4896 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4897 TYPE_MIN_VALUE (type))))
4898 && 0 != (t = fold (build (PLUS_EXPR, type, t,
4899 convert (type, integer_zero_node))))
4900 && host_integerp (t, 1))
4901 count = tree_low_cst (t, 1);
4902 else
4903 return -1;
4904 break;
4906 case ENUMERAL_TYPE:
4907 /* Don't waste time with enumeral types with huge values. */
4908 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4909 || TYPE_MAX_VALUE (type) == 0
4910 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4911 return -1;
4913 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
4914 count = 0;
4916 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4918 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4920 if (*sparseness == 2 || thisval <= lastval)
4921 *sparseness = 2;
4922 else if (thisval != minval + count)
4923 *sparseness = 1;
4925 lastval = thisval;
4926 count++;
4930 return count;
4933 #define BITARRAY_TEST(ARRAY, INDEX) \
4934 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4935 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4936 #define BITARRAY_SET(ARRAY, INDEX) \
4937 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4938 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
4940 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4941 with the case values we have seen, assuming the case expression
4942 has the given TYPE.
4943 SPARSENESS is as determined by all_cases_count.
4945 The time needed is proportional to COUNT, unless
4946 SPARSENESS is 2, in which case quadratic time is needed. */
4948 void
4949 mark_seen_cases (type, cases_seen, count, sparseness)
4950 tree type;
4951 unsigned char *cases_seen;
4952 HOST_WIDE_INT count;
4953 int sparseness;
4955 tree next_node_to_try = NULL_TREE;
4956 HOST_WIDE_INT next_node_offset = 0;
4958 struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4959 tree val = make_node (INTEGER_CST);
4961 TREE_TYPE (val) = type;
4962 if (! root)
4963 /* Do nothing. */
4965 else if (sparseness == 2)
4967 tree t;
4968 unsigned HOST_WIDE_INT xlo;
4970 /* This less efficient loop is only needed to handle
4971 duplicate case values (multiple enum constants
4972 with the same value). */
4973 TREE_TYPE (val) = TREE_TYPE (root->low);
4974 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4975 t = TREE_CHAIN (t), xlo++)
4977 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4978 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4979 n = root;
4982 /* Keep going past elements distinctly greater than VAL. */
4983 if (tree_int_cst_lt (val, n->low))
4984 n = n->left;
4986 /* or distinctly less than VAL. */
4987 else if (tree_int_cst_lt (n->high, val))
4988 n = n->right;
4990 else
4992 /* We have found a matching range. */
4993 BITARRAY_SET (cases_seen, xlo);
4994 break;
4997 while (n);
5000 else
5002 if (root->left)
5003 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5005 for (n = root; n; n = n->right)
5007 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5008 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5009 while (! tree_int_cst_lt (n->high, val))
5011 /* Calculate (into xlo) the "offset" of the integer (val).
5012 The element with lowest value has offset 0, the next smallest
5013 element has offset 1, etc. */
5015 unsigned HOST_WIDE_INT xlo;
5016 HOST_WIDE_INT xhi;
5017 tree t;
5019 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5021 /* The TYPE_VALUES will be in increasing order, so
5022 starting searching where we last ended. */
5023 t = next_node_to_try;
5024 xlo = next_node_offset;
5025 xhi = 0;
5026 for (;;)
5028 if (t == NULL_TREE)
5030 t = TYPE_VALUES (type);
5031 xlo = 0;
5033 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5035 next_node_to_try = TREE_CHAIN (t);
5036 next_node_offset = xlo + 1;
5037 break;
5039 xlo++;
5040 t = TREE_CHAIN (t);
5041 if (t == next_node_to_try)
5043 xlo = -1;
5044 break;
5048 else
5050 t = TYPE_MIN_VALUE (type);
5051 if (t)
5052 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5053 &xlo, &xhi);
5054 else
5055 xlo = xhi = 0;
5056 add_double (xlo, xhi,
5057 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5058 &xlo, &xhi);
5061 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
5062 BITARRAY_SET (cases_seen, xlo);
5064 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5065 1, 0,
5066 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5072 /* Given a switch statement with an expression that is an enumeration
5073 type, warn if any of the enumeration type's literals are not
5074 covered by the case expressions of the switch. Also, warn if there
5075 are any extra switch cases that are *not* elements of the
5076 enumerated type.
5078 Historical note:
5080 At one stage this function would: ``If all enumeration literals
5081 were covered by the case expressions, turn one of the expressions
5082 into the default expression since it should not be possible to fall
5083 through such a switch.''
5085 That code has since been removed as: ``This optimization is
5086 disabled because it causes valid programs to fail. ANSI C does not
5087 guarantee that an expression with enum type will have a value that
5088 is the same as one of the enumeration literals.'' */
5090 void
5091 check_for_full_enumeration_handling (type)
5092 tree type;
5094 struct case_node *n;
5095 tree chain;
5097 /* True iff the selector type is a numbered set mode. */
5098 int sparseness = 0;
5100 /* The number of possible selector values. */
5101 HOST_WIDE_INT size;
5103 /* For each possible selector value. a one iff it has been matched
5104 by a case value alternative. */
5105 unsigned char *cases_seen;
5107 /* The allocated size of cases_seen, in chars. */
5108 HOST_WIDE_INT bytes_needed;
5110 size = all_cases_count (type, &sparseness);
5111 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5113 if (size > 0 && size < 600000
5114 /* We deliberately use calloc here, not cmalloc, so that we can suppress
5115 this optimization if we don't have enough memory rather than
5116 aborting, as xmalloc would do. */
5117 && (cases_seen =
5118 (unsigned char *) really_call_calloc (bytes_needed, 1)) != NULL)
5120 HOST_WIDE_INT i;
5121 tree v = TYPE_VALUES (type);
5123 /* The time complexity of this code is normally O(N), where
5124 N being the number of members in the enumerated type.
5125 However, if type is an ENUMERAL_TYPE whose values do not
5126 increase monotonically, O(N*log(N)) time may be needed. */
5128 mark_seen_cases (type, cases_seen, size, sparseness);
5130 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5131 if (BITARRAY_TEST (cases_seen, i) == 0)
5132 warning ("enumeration value `%s' not handled in switch",
5133 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5135 free (cases_seen);
5138 /* Now we go the other way around; we warn if there are case
5139 expressions that don't correspond to enumerators. This can
5140 occur since C and C++ don't enforce type-checking of
5141 assignments to enumeration variables. */
5143 if (case_stack->data.case_stmt.case_list
5144 && case_stack->data.case_stmt.case_list->left)
5145 case_stack->data.case_stmt.case_list
5146 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5147 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5149 for (chain = TYPE_VALUES (type);
5150 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5151 chain = TREE_CHAIN (chain))
5154 if (!chain)
5156 if (TYPE_NAME (type) == 0)
5157 warning ("case value `%ld' not in enumerated type",
5158 (long) TREE_INT_CST_LOW (n->low));
5159 else
5160 warning ("case value `%ld' not in enumerated type `%s'",
5161 (long) TREE_INT_CST_LOW (n->low),
5162 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5163 == IDENTIFIER_NODE)
5164 ? TYPE_NAME (type)
5165 : DECL_NAME (TYPE_NAME (type))));
5167 if (!tree_int_cst_equal (n->low, n->high))
5169 for (chain = TYPE_VALUES (type);
5170 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5171 chain = TREE_CHAIN (chain))
5174 if (!chain)
5176 if (TYPE_NAME (type) == 0)
5177 warning ("case value `%ld' not in enumerated type",
5178 (long) TREE_INT_CST_LOW (n->high));
5179 else
5180 warning ("case value `%ld' not in enumerated type `%s'",
5181 (long) TREE_INT_CST_LOW (n->high),
5182 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5183 == IDENTIFIER_NODE)
5184 ? TYPE_NAME (type)
5185 : DECL_NAME (TYPE_NAME (type))));
5192 /* Maximum number of case bit tests. */
5193 #define MAX_CASE_BIT_TESTS 3
5195 /* By default, enable case bit tests on targets with ashlsi3. */
5196 #ifndef CASE_USE_BIT_TESTS
5197 #define CASE_USE_BIT_TESTS (ashl_optab->handlers[word_mode].insn_code \
5198 != CODE_FOR_nothing)
5199 #endif
5202 /* A case_bit_test represents a set of case nodes that may be
5203 selected from using a bit-wise comparison. HI and LO hold
5204 the integer to be tested against, LABEL contains the label
5205 to jump to upon success and BITS counts the number of case
5206 nodes handled by this test, typically the number of bits
5207 set in HI:LO. */
5209 struct case_bit_test
5211 HOST_WIDE_INT hi;
5212 HOST_WIDE_INT lo;
5213 rtx label;
5214 int bits;
5217 /* Determine whether "1 << x" is relatively cheap in word_mode. */
5219 static bool lshift_cheap_p ()
5221 static bool init = false;
5222 static bool cheap = true;
5224 if (!init)
5226 rtx reg = gen_rtx_REG (word_mode, 10000);
5227 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
5228 cheap = cost < COSTS_N_INSNS (3);
5229 init = true;
5232 return cheap;
5235 /* Comparison function for qsort to order bit tests by decreasing
5236 number of case nodes, i.e. the node with the most cases gets
5237 tested first. */
5239 static int case_bit_test_cmp (p1, p2)
5240 const void *p1;
5241 const void *p2;
5243 const struct case_bit_test *d1 = p1;
5244 const struct case_bit_test *d2 = p2;
5246 return d2->bits - d1->bits;
5249 /* Expand a switch statement by a short sequence of bit-wise
5250 comparisons. "switch(x)" is effectively converted into
5251 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
5252 integer constants.
5254 INDEX_EXPR is the value being switched on, which is of
5255 type INDEX_TYPE. MINVAL is the lowest case value of in
5256 the case nodes, of INDEX_TYPE type, and RANGE is highest
5257 value minus MINVAL, also of type INDEX_TYPE. NODES is
5258 the set of case nodes, and DEFAULT_LABEL is the label to
5259 branch to should none of the cases match.
5261 There *MUST* be MAX_CASE_BIT_TESTS or less unique case
5262 node targets. */
5264 static void
5265 emit_case_bit_tests (index_type, index_expr, minval, range,
5266 nodes, default_label)
5267 tree index_type, index_expr, minval, range;
5268 case_node_ptr nodes;
5269 rtx default_label;
5271 struct case_bit_test test[MAX_CASE_BIT_TESTS];
5272 enum machine_mode mode;
5273 rtx expr, index, label;
5274 unsigned int i,j,lo,hi;
5275 struct case_node *n;
5276 unsigned int count;
5278 count = 0;
5279 for (n = nodes; n; n = n->right)
5281 label = label_rtx (n->code_label);
5282 for (i = 0; i < count; i++)
5283 if (same_case_target_p (label, test[i].label))
5284 break;
5286 if (i == count)
5288 if (count >= MAX_CASE_BIT_TESTS)
5289 abort ();
5290 test[i].hi = 0;
5291 test[i].lo = 0;
5292 test[i].label = label;
5293 test[i].bits = 1;
5294 count++;
5296 else
5297 test[i].bits++;
5299 lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5300 n->low, minval)), 1);
5301 hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5302 n->high, minval)), 1);
5303 for (j = lo; j <= hi; j++)
5304 if (j >= HOST_BITS_PER_WIDE_INT)
5305 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
5306 else
5307 test[i].lo |= (HOST_WIDE_INT) 1 << j;
5310 qsort (test, count, sizeof(*test), case_bit_test_cmp);
5312 index_expr = fold (build (MINUS_EXPR, index_type,
5313 convert (index_type, index_expr),
5314 convert (index_type, minval)));
5315 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5316 emit_queue ();
5317 index = protect_from_queue (index, 0);
5318 do_pending_stack_adjust ();
5320 mode = TYPE_MODE (index_type);
5321 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
5322 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
5323 default_label);
5325 index = convert_to_mode (word_mode, index, 0);
5326 index = expand_binop (word_mode, ashl_optab, const1_rtx,
5327 index, NULL_RTX, 1, OPTAB_WIDEN);
5329 for (i = 0; i < count; i++)
5331 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
5332 expr = expand_binop (word_mode, and_optab, index, expr,
5333 NULL_RTX, 1, OPTAB_WIDEN);
5334 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
5335 word_mode, 1, test[i].label);
5338 emit_jump (default_label);
5341 /* Terminate a case (Pascal) or switch (C) statement
5342 in which ORIG_INDEX is the expression to be tested.
5343 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
5344 type as given in the source before any compiler conversions.
5345 Generate the code to test it and jump to the right place. */
5347 void
5348 expand_end_case_type (orig_index, orig_type)
5349 tree orig_index, orig_type;
5351 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
5352 rtx default_label = 0;
5353 struct case_node *n, *m;
5354 unsigned int count, uniq;
5355 rtx index;
5356 rtx table_label;
5357 int ncases;
5358 rtx *labelvec;
5359 int i;
5360 rtx before_case, end, lab;
5361 struct nesting *thiscase = case_stack;
5362 tree index_expr, index_type;
5363 bool exit_done = false;
5364 int unsignedp;
5366 /* Don't crash due to previous errors. */
5367 if (thiscase == NULL)
5368 return;
5370 index_expr = thiscase->data.case_stmt.index_expr;
5371 index_type = TREE_TYPE (index_expr);
5372 unsignedp = TREE_UNSIGNED (index_type);
5373 if (orig_type == NULL)
5374 orig_type = TREE_TYPE (orig_index);
5376 do_pending_stack_adjust ();
5378 /* This might get a spurious warning in the presence of a syntax error;
5379 it could be fixed by moving the call to check_seenlabel after the
5380 check for error_mark_node, and copying the code of check_seenlabel that
5381 deals with case_stack->data.case_stmt.line_number_status /
5382 restore_line_number_status in front of the call to end_cleanup_deferral;
5383 However, this might miss some useful warnings in the presence of
5384 non-syntax errors. */
5385 check_seenlabel ();
5387 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5388 if (index_type != error_mark_node)
5390 /* If the switch expression was an enumerated type, check that
5391 exactly all enumeration literals are covered by the cases.
5392 The check is made when -Wswitch was specified and there is no
5393 default case, or when -Wswitch-enum was specified. */
5394 if (((warn_switch && !thiscase->data.case_stmt.default_label)
5395 || warn_switch_enum)
5396 && TREE_CODE (orig_type) == ENUMERAL_TYPE
5397 && TREE_CODE (index_expr) != INTEGER_CST)
5398 check_for_full_enumeration_handling (orig_type);
5400 if (warn_switch_default && !thiscase->data.case_stmt.default_label)
5401 warning ("switch missing default case");
5403 /* If we don't have a default-label, create one here,
5404 after the body of the switch. */
5405 if (thiscase->data.case_stmt.default_label == 0)
5407 thiscase->data.case_stmt.default_label
5408 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5409 /* Share the exit label if possible. */
5410 if (thiscase->exit_label)
5412 SET_DECL_RTL (thiscase->data.case_stmt.default_label,
5413 thiscase->exit_label);
5414 exit_done = true;
5416 expand_label (thiscase->data.case_stmt.default_label);
5418 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5420 before_case = get_last_insn ();
5422 if (thiscase->data.case_stmt.case_list
5423 && thiscase->data.case_stmt.case_list->left)
5424 thiscase->data.case_stmt.case_list
5425 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5427 /* Simplify the case-list before we count it. */
5428 group_case_nodes (thiscase->data.case_stmt.case_list);
5429 strip_default_case_nodes (&thiscase->data.case_stmt.case_list,
5430 default_label);
5432 /* Get upper and lower bounds of case values.
5433 Also convert all the case values to the index expr's data type. */
5435 uniq = 0;
5436 count = 0;
5437 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5439 /* Check low and high label values are integers. */
5440 if (TREE_CODE (n->low) != INTEGER_CST)
5441 abort ();
5442 if (TREE_CODE (n->high) != INTEGER_CST)
5443 abort ();
5445 n->low = convert (index_type, n->low);
5446 n->high = convert (index_type, n->high);
5448 /* Count the elements and track the largest and smallest
5449 of them (treating them as signed even if they are not). */
5450 if (count++ == 0)
5452 minval = n->low;
5453 maxval = n->high;
5455 else
5457 if (INT_CST_LT (n->low, minval))
5458 minval = n->low;
5459 if (INT_CST_LT (maxval, n->high))
5460 maxval = n->high;
5462 /* A range counts double, since it requires two compares. */
5463 if (! tree_int_cst_equal (n->low, n->high))
5464 count++;
5466 /* Count the number of unique case node targets. */
5467 uniq++;
5468 lab = label_rtx (n->code_label);
5469 for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
5470 if (same_case_target_p (label_rtx (m->code_label), lab))
5472 uniq--;
5473 break;
5477 /* Compute span of values. */
5478 if (count != 0)
5479 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5481 end_cleanup_deferral ();
5483 if (count == 0)
5485 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5486 emit_queue ();
5487 emit_jump (default_label);
5490 /* Try implementing this switch statement by a short sequence of
5491 bit-wise comparisons. However, we let the binary-tree case
5492 below handle constant index expressions. */
5493 else if (CASE_USE_BIT_TESTS
5494 && ! TREE_CONSTANT (index_expr)
5495 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
5496 && compare_tree_int (range, 0) > 0
5497 && lshift_cheap_p ()
5498 && ((uniq == 1 && count >= 3)
5499 || (uniq == 2 && count >= 5)
5500 || (uniq == 3 && count >= 6)))
5502 /* Optimize the case where all the case values fit in a
5503 word without having to subtract MINVAL. In this case,
5504 we can optimize away the subtraction. */
5505 if (compare_tree_int (minval, 0) > 0
5506 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
5508 minval = integer_zero_node;
5509 range = maxval;
5511 emit_case_bit_tests (index_type, index_expr, minval, range,
5512 thiscase->data.case_stmt.case_list,
5513 default_label);
5516 /* If range of values is much bigger than number of values,
5517 make a sequence of conditional branches instead of a dispatch.
5518 If the switch-index is a constant, do it this way
5519 because we can optimize it. */
5521 else if (count < case_values_threshold ()
5522 || compare_tree_int (range, 10 * count) > 0
5523 /* RANGE may be signed, and really large ranges will show up
5524 as negative numbers. */
5525 || compare_tree_int (range, 0) < 0
5526 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5527 || flag_pic
5528 #endif
5529 || TREE_CONSTANT (index_expr))
5531 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5533 /* If the index is a short or char that we do not have
5534 an insn to handle comparisons directly, convert it to
5535 a full integer now, rather than letting each comparison
5536 generate the conversion. */
5538 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5539 && ! have_insn_for (COMPARE, GET_MODE (index)))
5541 enum machine_mode wider_mode;
5542 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5543 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5544 if (have_insn_for (COMPARE, wider_mode))
5546 index = convert_to_mode (wider_mode, index, unsignedp);
5547 break;
5551 emit_queue ();
5552 do_pending_stack_adjust ();
5554 index = protect_from_queue (index, 0);
5555 if (GET_CODE (index) == MEM)
5556 index = copy_to_reg (index);
5557 if (GET_CODE (index) == CONST_INT
5558 || TREE_CODE (index_expr) == INTEGER_CST)
5560 /* Make a tree node with the proper constant value
5561 if we don't already have one. */
5562 if (TREE_CODE (index_expr) != INTEGER_CST)
5564 index_expr
5565 = build_int_2 (INTVAL (index),
5566 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5567 index_expr = convert (index_type, index_expr);
5570 /* For constant index expressions we need only
5571 issue an unconditional branch to the appropriate
5572 target code. The job of removing any unreachable
5573 code is left to the optimisation phase if the
5574 "-O" option is specified. */
5575 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5576 if (! tree_int_cst_lt (index_expr, n->low)
5577 && ! tree_int_cst_lt (n->high, index_expr))
5578 break;
5580 if (n)
5581 emit_jump (label_rtx (n->code_label));
5582 else
5583 emit_jump (default_label);
5585 else
5587 /* If the index expression is not constant we generate
5588 a binary decision tree to select the appropriate
5589 target code. This is done as follows:
5591 The list of cases is rearranged into a binary tree,
5592 nearly optimal assuming equal probability for each case.
5594 The tree is transformed into RTL, eliminating
5595 redundant test conditions at the same time.
5597 If program flow could reach the end of the
5598 decision tree an unconditional jump to the
5599 default code is emitted. */
5601 use_cost_table
5602 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
5603 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5604 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
5605 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5606 default_label, index_type);
5607 emit_jump_if_reachable (default_label);
5610 else
5612 table_label = gen_label_rtx ();
5613 if (! try_casesi (index_type, index_expr, minval, range,
5614 table_label, default_label))
5616 index_type = thiscase->data.case_stmt.nominal_type;
5618 /* Index jumptables from zero for suitable values of
5619 minval to avoid a subtraction. */
5620 if (! optimize_size
5621 && compare_tree_int (minval, 0) > 0
5622 && compare_tree_int (minval, 3) < 0)
5624 minval = integer_zero_node;
5625 range = maxval;
5628 if (! try_tablejump (index_type, index_expr, minval, range,
5629 table_label, default_label))
5630 abort ();
5633 /* Get table of labels to jump to, in order of case index. */
5635 ncases = tree_low_cst (range, 0) + 1;
5636 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5637 memset ((char *) labelvec, 0, ncases * sizeof (rtx));
5639 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5641 /* Compute the low and high bounds relative to the minimum
5642 value since that should fit in a HOST_WIDE_INT while the
5643 actual values may not. */
5644 HOST_WIDE_INT i_low
5645 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5646 n->low, minval)), 1);
5647 HOST_WIDE_INT i_high
5648 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5649 n->high, minval)), 1);
5650 HOST_WIDE_INT i;
5652 for (i = i_low; i <= i_high; i ++)
5653 labelvec[i]
5654 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5657 /* Fill in the gaps with the default. */
5658 for (i = 0; i < ncases; i++)
5659 if (labelvec[i] == 0)
5660 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5662 /* Output the table */
5663 emit_label (table_label);
5665 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5666 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5667 gen_rtx_LABEL_REF (Pmode, table_label),
5668 gen_rtvec_v (ncases, labelvec),
5669 const0_rtx, const0_rtx));
5670 else
5671 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5672 gen_rtvec_v (ncases, labelvec)));
5674 /* If the case insn drops through the table,
5675 after the table we must jump to the default-label.
5676 Otherwise record no drop-through after the table. */
5677 #ifdef CASE_DROPS_THROUGH
5678 emit_jump (default_label);
5679 #else
5680 emit_barrier ();
5681 #endif
5684 before_case = NEXT_INSN (before_case);
5685 end = get_last_insn ();
5686 if (squeeze_notes (&before_case, &end))
5687 abort ();
5688 reorder_insns (before_case, end,
5689 thiscase->data.case_stmt.start);
5691 else
5692 end_cleanup_deferral ();
5694 if (thiscase->exit_label && !exit_done)
5695 emit_label (thiscase->exit_label);
5697 POPSTACK (case_stack);
5699 free_temp_slots ();
5702 /* Convert the tree NODE into a list linked by the right field, with the left
5703 field zeroed. RIGHT is used for recursion; it is a list to be placed
5704 rightmost in the resulting list. */
5706 static struct case_node *
5707 case_tree2list (node, right)
5708 struct case_node *node, *right;
5710 struct case_node *left;
5712 if (node->right)
5713 right = case_tree2list (node->right, right);
5715 node->right = right;
5716 if ((left = node->left))
5718 node->left = 0;
5719 return case_tree2list (left, node);
5722 return node;
5725 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5727 static void
5728 do_jump_if_equal (op1, op2, label, unsignedp)
5729 rtx op1, op2, label;
5730 int unsignedp;
5732 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
5734 if (INTVAL (op1) == INTVAL (op2))
5735 emit_jump (label);
5737 else
5738 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
5739 (GET_MODE (op1) == VOIDmode
5740 ? GET_MODE (op2) : GET_MODE (op1)),
5741 unsignedp, label);
5744 /* Not all case values are encountered equally. This function
5745 uses a heuristic to weight case labels, in cases where that
5746 looks like a reasonable thing to do.
5748 Right now, all we try to guess is text, and we establish the
5749 following weights:
5751 chars above space: 16
5752 digits: 16
5753 default: 12
5754 space, punct: 8
5755 tab: 4
5756 newline: 2
5757 other "\" chars: 1
5758 remaining chars: 0
5760 If we find any cases in the switch that are not either -1 or in the range
5761 of valid ASCII characters, or are control characters other than those
5762 commonly used with "\", don't treat this switch scanning text.
5764 Return 1 if these nodes are suitable for cost estimation, otherwise
5765 return 0. */
5767 static int
5768 estimate_case_costs (node)
5769 case_node_ptr node;
5771 tree min_ascii = integer_minus_one_node;
5772 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5773 case_node_ptr n;
5774 int i;
5776 /* If we haven't already made the cost table, make it now. Note that the
5777 lower bound of the table is -1, not zero. */
5779 if (! cost_table_initialized)
5781 cost_table_initialized = 1;
5783 for (i = 0; i < 128; i++)
5785 if (ISALNUM (i))
5786 COST_TABLE (i) = 16;
5787 else if (ISPUNCT (i))
5788 COST_TABLE (i) = 8;
5789 else if (ISCNTRL (i))
5790 COST_TABLE (i) = -1;
5793 COST_TABLE (' ') = 8;
5794 COST_TABLE ('\t') = 4;
5795 COST_TABLE ('\0') = 4;
5796 COST_TABLE ('\n') = 2;
5797 COST_TABLE ('\f') = 1;
5798 COST_TABLE ('\v') = 1;
5799 COST_TABLE ('\b') = 1;
5802 /* See if all the case expressions look like text. It is text if the
5803 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5804 as signed arithmetic since we don't want to ever access cost_table with a
5805 value less than -1. Also check that none of the constants in a range
5806 are strange control characters. */
5808 for (n = node; n; n = n->right)
5810 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5811 return 0;
5813 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5814 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5815 if (COST_TABLE (i) < 0)
5816 return 0;
5819 /* All interesting values are within the range of interesting
5820 ASCII characters. */
5821 return 1;
5824 /* Determine whether two case labels branch to the same target. */
5826 static bool
5827 same_case_target_p (l1, l2)
5828 rtx l1, l2;
5830 rtx i1, i2;
5832 if (l1 == l2)
5833 return true;
5835 i1 = next_real_insn (l1);
5836 i2 = next_real_insn (l2);
5837 if (i1 == i2)
5838 return true;
5840 if (i1 && simplejump_p (i1))
5842 l1 = XEXP (SET_SRC (PATTERN (i1)), 0);
5845 if (i2 && simplejump_p (i2))
5847 l2 = XEXP (SET_SRC (PATTERN (i2)), 0);
5849 return l1 == l2;
5852 /* Delete nodes that branch to the default label from a list of
5853 case nodes. Eg. case 5: default: becomes just default: */
5855 static void
5856 strip_default_case_nodes (prev, deflab)
5857 case_node_ptr *prev;
5858 rtx deflab;
5860 case_node_ptr ptr;
5862 while (*prev)
5864 ptr = *prev;
5865 if (same_case_target_p (label_rtx (ptr->code_label), deflab))
5866 *prev = ptr->right;
5867 else
5868 prev = &ptr->right;
5872 /* Scan an ordered list of case nodes
5873 combining those with consecutive values or ranges.
5875 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
5877 static void
5878 group_case_nodes (head)
5879 case_node_ptr head;
5881 case_node_ptr node = head;
5883 while (node)
5885 rtx lab = label_rtx (node->code_label);
5886 case_node_ptr np = node;
5888 /* Try to group the successors of NODE with NODE. */
5889 while (((np = np->right) != 0)
5890 /* Do they jump to the same place? */
5891 && same_case_target_p (label_rtx (np->code_label), lab)
5892 /* Are their ranges consecutive? */
5893 && tree_int_cst_equal (np->low,
5894 fold (build (PLUS_EXPR,
5895 TREE_TYPE (node->high),
5896 node->high,
5897 integer_one_node)))
5898 /* An overflow is not consecutive. */
5899 && tree_int_cst_lt (node->high,
5900 fold (build (PLUS_EXPR,
5901 TREE_TYPE (node->high),
5902 node->high,
5903 integer_one_node))))
5905 node->high = np->high;
5907 /* NP is the first node after NODE which can't be grouped with it.
5908 Delete the nodes in between, and move on to that node. */
5909 node->right = np;
5910 node = np;
5914 /* Take an ordered list of case nodes
5915 and transform them into a near optimal binary tree,
5916 on the assumption that any target code selection value is as
5917 likely as any other.
5919 The transformation is performed by splitting the ordered
5920 list into two equal sections plus a pivot. The parts are
5921 then attached to the pivot as left and right branches. Each
5922 branch is then transformed recursively. */
5924 static void
5925 balance_case_nodes (head, parent)
5926 case_node_ptr *head;
5927 case_node_ptr parent;
5929 case_node_ptr np;
5931 np = *head;
5932 if (np)
5934 int cost = 0;
5935 int i = 0;
5936 int ranges = 0;
5937 case_node_ptr *npp;
5938 case_node_ptr left;
5940 /* Count the number of entries on branch. Also count the ranges. */
5942 while (np)
5944 if (!tree_int_cst_equal (np->low, np->high))
5946 ranges++;
5947 if (use_cost_table)
5948 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
5951 if (use_cost_table)
5952 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
5954 i++;
5955 np = np->right;
5958 if (i > 2)
5960 /* Split this list if it is long enough for that to help. */
5961 npp = head;
5962 left = *npp;
5963 if (use_cost_table)
5965 /* Find the place in the list that bisects the list's total cost,
5966 Here I gets half the total cost. */
5967 int n_moved = 0;
5968 i = (cost + 1) / 2;
5969 while (1)
5971 /* Skip nodes while their cost does not reach that amount. */
5972 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5973 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5974 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
5975 if (i <= 0)
5976 break;
5977 npp = &(*npp)->right;
5978 n_moved += 1;
5980 if (n_moved == 0)
5982 /* Leave this branch lopsided, but optimize left-hand
5983 side and fill in `parent' fields for right-hand side. */
5984 np = *head;
5985 np->parent = parent;
5986 balance_case_nodes (&np->left, np);
5987 for (; np->right; np = np->right)
5988 np->right->parent = np;
5989 return;
5992 /* If there are just three nodes, split at the middle one. */
5993 else if (i == 3)
5994 npp = &(*npp)->right;
5995 else
5997 /* Find the place in the list that bisects the list's total cost,
5998 where ranges count as 2.
5999 Here I gets half the total cost. */
6000 i = (i + ranges + 1) / 2;
6001 while (1)
6003 /* Skip nodes while their cost does not reach that amount. */
6004 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
6005 i--;
6006 i--;
6007 if (i <= 0)
6008 break;
6009 npp = &(*npp)->right;
6012 *head = np = *npp;
6013 *npp = 0;
6014 np->parent = parent;
6015 np->left = left;
6017 /* Optimize each of the two split parts. */
6018 balance_case_nodes (&np->left, np);
6019 balance_case_nodes (&np->right, np);
6021 else
6023 /* Else leave this branch as one level,
6024 but fill in `parent' fields. */
6025 np = *head;
6026 np->parent = parent;
6027 for (; np->right; np = np->right)
6028 np->right->parent = np;
6033 /* Search the parent sections of the case node tree
6034 to see if a test for the lower bound of NODE would be redundant.
6035 INDEX_TYPE is the type of the index expression.
6037 The instructions to generate the case decision tree are
6038 output in the same order as nodes are processed so it is
6039 known that if a parent node checks the range of the current
6040 node minus one that the current node is bounded at its lower
6041 span. Thus the test would be redundant. */
6043 static int
6044 node_has_low_bound (node, index_type)
6045 case_node_ptr node;
6046 tree index_type;
6048 tree low_minus_one;
6049 case_node_ptr pnode;
6051 /* If the lower bound of this node is the lowest value in the index type,
6052 we need not test it. */
6054 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
6055 return 1;
6057 /* If this node has a left branch, the value at the left must be less
6058 than that at this node, so it cannot be bounded at the bottom and
6059 we need not bother testing any further. */
6061 if (node->left)
6062 return 0;
6064 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
6065 node->low, integer_one_node));
6067 /* If the subtraction above overflowed, we can't verify anything.
6068 Otherwise, look for a parent that tests our value - 1. */
6070 if (! tree_int_cst_lt (low_minus_one, node->low))
6071 return 0;
6073 for (pnode = node->parent; pnode; pnode = pnode->parent)
6074 if (tree_int_cst_equal (low_minus_one, pnode->high))
6075 return 1;
6077 return 0;
6080 /* Search the parent sections of the case node tree
6081 to see if a test for the upper bound of NODE would be redundant.
6082 INDEX_TYPE is the type of the index expression.
6084 The instructions to generate the case decision tree are
6085 output in the same order as nodes are processed so it is
6086 known that if a parent node checks the range of the current
6087 node plus one that the current node is bounded at its upper
6088 span. Thus the test would be redundant. */
6090 static int
6091 node_has_high_bound (node, index_type)
6092 case_node_ptr node;
6093 tree index_type;
6095 tree high_plus_one;
6096 case_node_ptr pnode;
6098 /* If there is no upper bound, obviously no test is needed. */
6100 if (TYPE_MAX_VALUE (index_type) == NULL)
6101 return 1;
6103 /* If the upper bound of this node is the highest value in the type
6104 of the index expression, we need not test against it. */
6106 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
6107 return 1;
6109 /* If this node has a right branch, the value at the right must be greater
6110 than that at this node, so it cannot be bounded at the top and
6111 we need not bother testing any further. */
6113 if (node->right)
6114 return 0;
6116 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
6117 node->high, integer_one_node));
6119 /* If the addition above overflowed, we can't verify anything.
6120 Otherwise, look for a parent that tests our value + 1. */
6122 if (! tree_int_cst_lt (node->high, high_plus_one))
6123 return 0;
6125 for (pnode = node->parent; pnode; pnode = pnode->parent)
6126 if (tree_int_cst_equal (high_plus_one, pnode->low))
6127 return 1;
6129 return 0;
6132 /* Search the parent sections of the
6133 case node tree to see if both tests for the upper and lower
6134 bounds of NODE would be redundant. */
6136 static int
6137 node_is_bounded (node, index_type)
6138 case_node_ptr node;
6139 tree index_type;
6141 return (node_has_low_bound (node, index_type)
6142 && node_has_high_bound (node, index_type));
6145 /* Emit an unconditional jump to LABEL unless it would be dead code. */
6147 static void
6148 emit_jump_if_reachable (label)
6149 rtx label;
6151 if (GET_CODE (get_last_insn ()) != BARRIER)
6152 emit_jump (label);
6155 /* Emit step-by-step code to select a case for the value of INDEX.
6156 The thus generated decision tree follows the form of the
6157 case-node binary tree NODE, whose nodes represent test conditions.
6158 INDEX_TYPE is the type of the index of the switch.
6160 Care is taken to prune redundant tests from the decision tree
6161 by detecting any boundary conditions already checked by
6162 emitted rtx. (See node_has_high_bound, node_has_low_bound
6163 and node_is_bounded, above.)
6165 Where the test conditions can be shown to be redundant we emit
6166 an unconditional jump to the target code. As a further
6167 optimization, the subordinates of a tree node are examined to
6168 check for bounded nodes. In this case conditional and/or
6169 unconditional jumps as a result of the boundary check for the
6170 current node are arranged to target the subordinates associated
6171 code for out of bound conditions on the current node.
6173 We can assume that when control reaches the code generated here,
6174 the index value has already been compared with the parents
6175 of this node, and determined to be on the same side of each parent
6176 as this node is. Thus, if this node tests for the value 51,
6177 and a parent tested for 52, we don't need to consider
6178 the possibility of a value greater than 51. If another parent
6179 tests for the value 50, then this node need not test anything. */
6181 static void
6182 emit_case_nodes (index, node, default_label, index_type)
6183 rtx index;
6184 case_node_ptr node;
6185 rtx default_label;
6186 tree index_type;
6188 /* If INDEX has an unsigned type, we must make unsigned branches. */
6189 int unsignedp = TREE_UNSIGNED (index_type);
6190 enum machine_mode mode = GET_MODE (index);
6191 enum machine_mode imode = TYPE_MODE (index_type);
6193 /* See if our parents have already tested everything for us.
6194 If they have, emit an unconditional jump for this node. */
6195 if (node_is_bounded (node, index_type))
6196 emit_jump (label_rtx (node->code_label));
6198 else if (tree_int_cst_equal (node->low, node->high))
6200 /* Node is single valued. First see if the index expression matches
6201 this node and then check our children, if any. */
6203 do_jump_if_equal (index,
6204 convert_modes (mode, imode,
6205 expand_expr (node->low, NULL_RTX,
6206 VOIDmode, 0),
6207 unsignedp),
6208 label_rtx (node->code_label), unsignedp);
6210 if (node->right != 0 && node->left != 0)
6212 /* This node has children on both sides.
6213 Dispatch to one side or the other
6214 by comparing the index value with this node's value.
6215 If one subtree is bounded, check that one first,
6216 so we can avoid real branches in the tree. */
6218 if (node_is_bounded (node->right, index_type))
6220 emit_cmp_and_jump_insns (index,
6221 convert_modes
6222 (mode, imode,
6223 expand_expr (node->high, NULL_RTX,
6224 VOIDmode, 0),
6225 unsignedp),
6226 GT, NULL_RTX, mode, unsignedp,
6227 label_rtx (node->right->code_label));
6228 emit_case_nodes (index, node->left, default_label, index_type);
6231 else if (node_is_bounded (node->left, index_type))
6233 emit_cmp_and_jump_insns (index,
6234 convert_modes
6235 (mode, imode,
6236 expand_expr (node->high, NULL_RTX,
6237 VOIDmode, 0),
6238 unsignedp),
6239 LT, NULL_RTX, mode, unsignedp,
6240 label_rtx (node->left->code_label));
6241 emit_case_nodes (index, node->right, default_label, index_type);
6244 else
6246 /* Neither node is bounded. First distinguish the two sides;
6247 then emit the code for one side at a time. */
6249 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6251 /* See if the value is on the right. */
6252 emit_cmp_and_jump_insns (index,
6253 convert_modes
6254 (mode, imode,
6255 expand_expr (node->high, NULL_RTX,
6256 VOIDmode, 0),
6257 unsignedp),
6258 GT, NULL_RTX, mode, unsignedp,
6259 label_rtx (test_label));
6261 /* Value must be on the left.
6262 Handle the left-hand subtree. */
6263 emit_case_nodes (index, node->left, default_label, index_type);
6264 /* If left-hand subtree does nothing,
6265 go to default. */
6266 emit_jump_if_reachable (default_label);
6268 /* Code branches here for the right-hand subtree. */
6269 expand_label (test_label);
6270 emit_case_nodes (index, node->right, default_label, index_type);
6274 else if (node->right != 0 && node->left == 0)
6276 /* Here we have a right child but no left so we issue conditional
6277 branch to default and process the right child.
6279 Omit the conditional branch to default if we it avoid only one
6280 right child; it costs too much space to save so little time. */
6282 if (node->right->right || node->right->left
6283 || !tree_int_cst_equal (node->right->low, node->right->high))
6285 if (!node_has_low_bound (node, index_type))
6287 emit_cmp_and_jump_insns (index,
6288 convert_modes
6289 (mode, imode,
6290 expand_expr (node->high, NULL_RTX,
6291 VOIDmode, 0),
6292 unsignedp),
6293 LT, NULL_RTX, mode, unsignedp,
6294 default_label);
6297 emit_case_nodes (index, node->right, default_label, index_type);
6299 else
6300 /* We cannot process node->right normally
6301 since we haven't ruled out the numbers less than
6302 this node's value. So handle node->right explicitly. */
6303 do_jump_if_equal (index,
6304 convert_modes
6305 (mode, imode,
6306 expand_expr (node->right->low, NULL_RTX,
6307 VOIDmode, 0),
6308 unsignedp),
6309 label_rtx (node->right->code_label), unsignedp);
6312 else if (node->right == 0 && node->left != 0)
6314 /* Just one subtree, on the left. */
6315 if (node->left->left || node->left->right
6316 || !tree_int_cst_equal (node->left->low, node->left->high))
6318 if (!node_has_high_bound (node, index_type))
6320 emit_cmp_and_jump_insns (index,
6321 convert_modes
6322 (mode, imode,
6323 expand_expr (node->high, NULL_RTX,
6324 VOIDmode, 0),
6325 unsignedp),
6326 GT, NULL_RTX, mode, unsignedp,
6327 default_label);
6330 emit_case_nodes (index, node->left, default_label, index_type);
6332 else
6333 /* We cannot process node->left normally
6334 since we haven't ruled out the numbers less than
6335 this node's value. So handle node->left explicitly. */
6336 do_jump_if_equal (index,
6337 convert_modes
6338 (mode, imode,
6339 expand_expr (node->left->low, NULL_RTX,
6340 VOIDmode, 0),
6341 unsignedp),
6342 label_rtx (node->left->code_label), unsignedp);
6345 else
6347 /* Node is a range. These cases are very similar to those for a single
6348 value, except that we do not start by testing whether this node
6349 is the one to branch to. */
6351 if (node->right != 0 && node->left != 0)
6353 /* Node has subtrees on both sides.
6354 If the right-hand subtree is bounded,
6355 test for it first, since we can go straight there.
6356 Otherwise, we need to make a branch in the control structure,
6357 then handle the two subtrees. */
6358 tree test_label = 0;
6360 if (node_is_bounded (node->right, index_type))
6361 /* Right hand node is fully bounded so we can eliminate any
6362 testing and branch directly to the target code. */
6363 emit_cmp_and_jump_insns (index,
6364 convert_modes
6365 (mode, imode,
6366 expand_expr (node->high, NULL_RTX,
6367 VOIDmode, 0),
6368 unsignedp),
6369 GT, NULL_RTX, mode, unsignedp,
6370 label_rtx (node->right->code_label));
6371 else
6373 /* Right hand node requires testing.
6374 Branch to a label where we will handle it later. */
6376 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6377 emit_cmp_and_jump_insns (index,
6378 convert_modes
6379 (mode, imode,
6380 expand_expr (node->high, NULL_RTX,
6381 VOIDmode, 0),
6382 unsignedp),
6383 GT, NULL_RTX, mode, unsignedp,
6384 label_rtx (test_label));
6387 /* Value belongs to this node or to the left-hand subtree. */
6389 emit_cmp_and_jump_insns (index,
6390 convert_modes
6391 (mode, imode,
6392 expand_expr (node->low, NULL_RTX,
6393 VOIDmode, 0),
6394 unsignedp),
6395 GE, NULL_RTX, mode, unsignedp,
6396 label_rtx (node->code_label));
6398 /* Handle the left-hand subtree. */
6399 emit_case_nodes (index, node->left, default_label, index_type);
6401 /* If right node had to be handled later, do that now. */
6403 if (test_label)
6405 /* If the left-hand subtree fell through,
6406 don't let it fall into the right-hand subtree. */
6407 emit_jump_if_reachable (default_label);
6409 expand_label (test_label);
6410 emit_case_nodes (index, node->right, default_label, index_type);
6414 else if (node->right != 0 && node->left == 0)
6416 /* Deal with values to the left of this node,
6417 if they are possible. */
6418 if (!node_has_low_bound (node, index_type))
6420 emit_cmp_and_jump_insns (index,
6421 convert_modes
6422 (mode, imode,
6423 expand_expr (node->low, NULL_RTX,
6424 VOIDmode, 0),
6425 unsignedp),
6426 LT, NULL_RTX, mode, unsignedp,
6427 default_label);
6430 /* Value belongs to this node or to the right-hand subtree. */
6432 emit_cmp_and_jump_insns (index,
6433 convert_modes
6434 (mode, imode,
6435 expand_expr (node->high, NULL_RTX,
6436 VOIDmode, 0),
6437 unsignedp),
6438 LE, NULL_RTX, mode, unsignedp,
6439 label_rtx (node->code_label));
6441 emit_case_nodes (index, node->right, default_label, index_type);
6444 else if (node->right == 0 && node->left != 0)
6446 /* Deal with values to the right of this node,
6447 if they are possible. */
6448 if (!node_has_high_bound (node, index_type))
6450 emit_cmp_and_jump_insns (index,
6451 convert_modes
6452 (mode, imode,
6453 expand_expr (node->high, NULL_RTX,
6454 VOIDmode, 0),
6455 unsignedp),
6456 GT, NULL_RTX, mode, unsignedp,
6457 default_label);
6460 /* Value belongs to this node or to the left-hand subtree. */
6462 emit_cmp_and_jump_insns (index,
6463 convert_modes
6464 (mode, imode,
6465 expand_expr (node->low, NULL_RTX,
6466 VOIDmode, 0),
6467 unsignedp),
6468 GE, NULL_RTX, mode, unsignedp,
6469 label_rtx (node->code_label));
6471 emit_case_nodes (index, node->left, default_label, index_type);
6474 else
6476 /* Node has no children so we check low and high bounds to remove
6477 redundant tests. Only one of the bounds can exist,
6478 since otherwise this node is bounded--a case tested already. */
6479 int high_bound = node_has_high_bound (node, index_type);
6480 int low_bound = node_has_low_bound (node, index_type);
6482 if (!high_bound && low_bound)
6484 emit_cmp_and_jump_insns (index,
6485 convert_modes
6486 (mode, imode,
6487 expand_expr (node->high, NULL_RTX,
6488 VOIDmode, 0),
6489 unsignedp),
6490 GT, NULL_RTX, mode, unsignedp,
6491 default_label);
6494 else if (!low_bound && high_bound)
6496 emit_cmp_and_jump_insns (index,
6497 convert_modes
6498 (mode, imode,
6499 expand_expr (node->low, NULL_RTX,
6500 VOIDmode, 0),
6501 unsignedp),
6502 LT, NULL_RTX, mode, unsignedp,
6503 default_label);
6505 else if (!low_bound && !high_bound)
6507 /* Widen LOW and HIGH to the same width as INDEX. */
6508 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
6509 tree low = build1 (CONVERT_EXPR, type, node->low);
6510 tree high = build1 (CONVERT_EXPR, type, node->high);
6511 rtx low_rtx, new_index, new_bound;
6513 /* Instead of doing two branches, emit one unsigned branch for
6514 (index-low) > (high-low). */
6515 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
6516 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
6517 NULL_RTX, unsignedp,
6518 OPTAB_WIDEN);
6519 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
6520 high, low)),
6521 NULL_RTX, mode, 0);
6523 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
6524 mode, 1, default_label);
6527 emit_jump (label_rtx (node->code_label));
6532 #include "gt-stmt.h"