/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
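/* For illustration only (an editor's sketch, not code from this file):
   a front end expanding `if (COND) THEN-BODY else ELSE-BODY' makes
   roughly this sequence of calls, using the functions defined below:

	expand_start_cond (cond, 0);	(emit test; branch if COND false)
	  ... expand THEN-BODY statements ...
	expand_start_else ();		(jump to endif; place next_label)
	  ... expand ELSE-BODY statements ...
	expand_end_cond ();		(place the endif label)
*/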
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "hard-reg-set.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
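/* For illustration only: a dense switch such as `case 1' ... `case 8'
   would normally get a branch table, while a sparse one such as
   `case 1', `case 1000', `case 100000' would be emitted as the
   compare-and-jump tree described above, e.g.

		 1000
		/    \
	       1    100000

   where each node compares the index against its value (or range)
   and branches to its label, its children, or the default.  */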
struct case_node
{
  struct case_node	*left;	/* Left son in binary tree */
  struct case_node	*right;	/* Right son in binary tree; also node chain */
  struct case_node	*parent; /* Parent of node in binary tree */
  tree			low;	/* Lowest index value for this label */
  tree			high;	/* Highest index value for this label */
  tree			code_label; /* Label to jump to when node matches */
  int			balance; /* Balance factor for the AVL tree */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
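/* For illustration only: while expanding

	while (a)		(pushes a loop nesting object)
	  if (b)		(pushes a cond nesting object)
	    ...

   `nesting_stack' chains the cond object (innermost) to the loop
   object through `all', while `loop_stack' and `cond_stack' each
   hold their own object through `next'.  */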
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int n_function_calls;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block, if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	  /* When in a conditional context, this is the specific
	     cleanup list associated with last_unconditional_cleanup,
	     where we place the conditionalized cleanups.  */
	  tree conditional_cleanups;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} case_stmt;
    } data;
};
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */
#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (this != target); } while (0)
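/* For illustration only: an `expand_end_WHATEVER' function typically
   unwinds to (and including) its own nesting object with, e.g.,

	POPSTACK (cond_stack);

   which also pops any more deeply nested loop, block, or case objects
   from their individual stacks.  */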
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};
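/* For illustration only: a fixup typically arises from source such as

	{
	  char buf[n];		(variable-sized, so this contour
	  ...			 restores a stack level on exit)
	  goto done;		(`done' is not yet defined here)
	}
      done: ;

   expand_goto cannot yet emit the stack restore, so expand_fixup
   records the jump, and fixup_gotos finishes it once the label or
   the end of the contour is reached.  */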
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following element in the chain.  */
  struct label_chain *next;
  tree label;
};
/* Statement state for the function being compiled, kept in
   `current_function->stmt' and accessed through the macros below.  */

struct stmt_status
{
  /* Chain of all pending binding contours.  */
  struct nesting *x_block_stack;

  /* If any new stacks are added here, add them to POPSTACK too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting *x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting *x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting *x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting *x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting *x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Filename and line number of last line-number note,
     whether we actually emitted it or not.  */
  char *x_emit_filename;
  int x_emit_lineno;

  /* Chain of pending fixup gotos.  */
  struct goto_fixup *x_goto_fixup_chain;
};
#define block_stack (current_function->stmt->x_block_stack)
#define stack_block_stack (current_function->stmt->x_stack_block_stack)
#define cond_stack (current_function->stmt->x_cond_stack)
#define loop_stack (current_function->stmt->x_loop_stack)
#define case_stack (current_function->stmt->x_case_stack)
#define nesting_stack (current_function->stmt->x_nesting_stack)
#define nesting_depth (current_function->stmt->x_nesting_depth)
#define current_block_start_count (current_function->stmt->x_block_start_count)
#define last_expr_type (current_function->stmt->x_last_expr_type)
#define last_expr_value (current_function->stmt->x_last_expr_value)
#define expr_stmts_for_value (current_function->stmt->x_expr_stmts_for_value)
#define emit_filename (current_function->stmt->x_emit_filename)
#define emit_lineno (current_function->stmt->x_emit_lineno)
#define goto_fixup_chain (current_function->stmt->x_goto_fixup_chain)
/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

/* Character strings, each containing a single decimal digit.  */
static char *digit_strings[10];
static int n_occurrences		PROTO((int, const char *));
static void expand_goto_internal	PROTO((tree, rtx, rtx));
static int expand_fixup			PROTO((tree, rtx, rtx));
static rtx expand_nl_handler_label	PROTO((rtx, rtx));
static void expand_nl_goto_receiver	PROTO((void));
static void expand_nl_goto_receivers	PROTO((struct nesting *));
static void fixup_gotos			PROTO((struct nesting *, rtx, tree,
					       rtx, int));
static void expand_null_return_1	PROTO((rtx, int));
static void expand_value_return		PROTO((rtx));
static int tail_recursion_args		PROTO((tree, tree));
static void expand_cleanups		PROTO((tree, tree, int, int));
static void check_seenlabel		PROTO((void));
static void do_jump_if_equal		PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs		PROTO((case_node_ptr));
static void group_case_nodes		PROTO((case_node_ptr));
static void balance_case_nodes		PROTO((case_node_ptr *,
					       case_node_ptr));
static int node_has_low_bound		PROTO((case_node_ptr, tree));
static int node_has_high_bound		PROTO((case_node_ptr, tree));
static int node_is_bounded		PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable	PROTO((rtx));
static void emit_case_nodes		PROTO((rtx, case_node_ptr, rtx, tree));
static int add_case_node		PROTO((tree, tree, tree, tree *));
static struct case_node *case_tree2list PROTO((case_node *, case_node *));
static void mark_cond_nesting		PROTO((struct nesting *));
static void mark_loop_nesting		PROTO((struct nesting *));
static void mark_block_nesting		PROTO((struct nesting *));
static void mark_case_nesting		PROTO((struct nesting *));
static void mark_goto_fixup		PROTO((struct goto_fixup *));
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}
/* Mark N (known to be a cond-nesting) for GC.  */

static void
mark_cond_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.cond.endif_label);
      ggc_mark_rtx (n->data.cond.next_label);

      n = n->next;
    }
}
/* Mark N (known to be a loop-nesting) for GC.  */

static void
mark_loop_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.loop.start_label);
      ggc_mark_rtx (n->data.loop.end_label);
      ggc_mark_rtx (n->data.loop.alt_end_label);
      ggc_mark_rtx (n->data.loop.continue_label);

      n = n->next;
    }
}
/* Mark N (known to be a block-nesting) for GC.  */

static void
mark_block_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      struct label_chain *l;

      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.block.stack_level);
      ggc_mark_rtx (n->data.block.first_insn);
      ggc_mark_tree (n->data.block.cleanups);
      ggc_mark_tree (n->data.block.outer_cleanups);

      for (l = n->data.block.label_chain; l != NULL; l = l->next)
	ggc_mark_tree (l->label);

      ggc_mark_rtx (n->data.block.last_unconditional_cleanup);

      /* ??? cleanup_ptr never points outside the stack, does it?  */

      n = n->next;
    }
}
/* Mark N (known to be a case-nesting) for GC.  */

static void
mark_case_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      struct case_node *node;

      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.case_stmt.start);

      node = n->data.case_stmt.case_list;
      while (node)
	{
	  ggc_mark_tree (node->low);
	  ggc_mark_tree (node->high);
	  ggc_mark_tree (node->code_label);
	  node = node->right;
	}

      ggc_mark_tree (n->data.case_stmt.default_label);
      ggc_mark_tree (n->data.case_stmt.index_expr);
      ggc_mark_tree (n->data.case_stmt.nominal_type);

      n = n->next;
    }
}
/* Mark G (a chain of goto fixups) for GC.  */

static void
mark_goto_fixup (g)
     struct goto_fixup *g;
{
  while (g)
    {
      ggc_mark_rtx (g->before_jump);
      ggc_mark_tree (g->target);
      ggc_mark_tree (g->context);
      ggc_mark_rtx (g->target_rtl);
      ggc_mark_rtx (g->stack_level);
      ggc_mark_tree (g->cleanup_list_list);

      g = g->next;
    }
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_stmt_status (f)
     struct function *f;
{
  /* We're about to free the function obstack.  If we hold pointers to
     things allocated there, then we'll try to mark them when we do
     GC.  So, we clear them out here explicitly.  */
  if (f->stmt)
    free (f->stmt);
  f->stmt = NULL;
}

/* Mark the statement state P for GC.  */

void
mark_stmt_status (p)
     struct stmt_status *p;
{
  if (p == 0)
    return;

  mark_block_nesting (p->x_block_stack);
  mark_cond_nesting (p->x_cond_stack);
  mark_loop_nesting (p->x_loop_stack);
  mark_case_nesting (p->x_case_stack);

  ggc_mark_tree (p->x_last_expr_type);
  /* last_expr_value is only valid if last_expr_type is nonzero.  */
  if (p->x_last_expr_type)
    ggc_mark_rtx (p->x_last_expr_value);

  mark_goto_fixup (p->x_goto_fixup_chain);
}
void
init_stmt ()
{
  int i;

  gcc_obstack_init (&stmt_obstack);

  for (i = 0; i < 10; i++)
    {
      digit_strings[i] = ggc_alloc_string (NULL, 1);
      digit_strings[i][0] = '0' + i;
    }

  ggc_add_string_root (digit_strings, 10);
}
void
init_stmt_for_function ()
{
  current_function->stmt
    = (struct stmt_status *) xmalloc (sizeof (struct stmt_status));

  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  current_block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
  last_expr_value = NULL_RTX;
}
/* Return nonzero if anything is pushed on the loop, condition, or case
   stack.  */

int
in_control_zone_p ()
{
  return cond_stack || loop_stack || case_stack;
}
/* Record the current file and line.  Called from emit_line_note.  */

void
set_file_and_line_for_stmt (file, line)
     char *file;
     int line;
{
  emit_filename = file;
  emit_lineno = line;
}
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  /* Be sure the function is executable.  */
  if (current_function_check_memory_usage)
    emit_library_call (chkr_check_exec_libfunc, 1,
		       VOIDmode, 1, x, ptr_mode);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);

  current_function_has_computed_jump = 1;
}
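/* For illustration only: the function above implements the GNU C
   computed-goto extension, e.g.

	static void *labels[] = { &&l1, &&l2 };
	goto *labels[i];

   where the front end hands the pointer expression to
   expand_computed_goto.  */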
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx temp, handler_slot;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->x_nonlocal_goto_handler_slots;
      for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
	   link = TREE_CHAIN (link))
	handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (handler_slot),
				      copy_rtx (p->x_nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with stack_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   hard_frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->x_nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx,
				hard_frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* USE of hard_frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;

      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes (including expand_end_loop)
	 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
	 as a placeholder.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();
	rtx start;
	tree block;

	block = make_node (BLOCK);
	TREE_USED (block) = 1;

	if (current_function->x_whole_function_mode_p)
	  {
	    find_loop_tree_blocks ();
	    retrofit_block (block, original_before_jump);
	  }
	else
	  insert_block (block);

	start_sequence ();
	start = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_DELETED);
	emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = block;
	end_sequence ();
	emit_insns_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;

	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point ensures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      pushlevel (0);
	      set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      poplevel (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insns_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (c, s)
     int c;
     const char *s;
{
  int n = 0;

  while (*s)
    n += (*s++ == c);
  return n;
}
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used in function where memory usage is checked");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
				TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
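/* For illustration only: a source-level use such as

	asm volatile ("add %1,%0" : "=r" (out) : "r" (in), "0" (out) : "cc");

   arrives here with OUTPUTS holding the "=r" lvalue, INPUTS holding
   the "r" operand and the matching "0" operand, CLOBBERS naming "cc",
   and VOL nonzero because of `volatile'.  */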
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout = 0, nclobbers = 0;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  /* The insn we have emitted.  */
  rtx insn;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

#ifdef MD_ASM_CLOBBERS
  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  MD_ASM_CLOBBERS (clobbers);
#endif

  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used in function where memory usage is checked");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));

      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;

  /* Check that the number of alternatives is constant across all
     operands.  */
  if (outputs || inputs)
    {
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives = n_occurrences (',', TREE_STRING_POINTER (tmp));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in `asm'");
	  return;
	}

      tmp = outputs;
      while (tmp)
	{
	  char *constraint = TREE_STRING_POINTER (TREE_PURPOSE (tmp));

	  if (n_occurrences (',', constraint) != nalternatives)
	    {
	      error ("operand constraints for `asm' differ in number of alternatives");
	      return;
	    }

	  if (TREE_CHAIN (tmp))
	    tmp = TREE_CHAIN (tmp);
	  else
	    tmp = next, next = 0;
	}
    }
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      char *constraint;
      char *p;
      int c_len;
      int j;
      int is_inout = 0;
      int allows_reg = 0;
      int allows_mem = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
	 if it allows any register.  Be liberal on the latter test, since
	 the worst that happens if we get it wrong is we issue an error
	 message.  */

      c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));

      /* Allow the `=' or `+' to not be at the beginning of the string,
	 since it wasn't explicitly documented that way, and there is a
	 large body of code that puts it last.  Swap the character to
	 the front, so as not to uglify any place else.  */

      if ((p = strchr (constraint, '=')) != NULL)
	;
      else if ((p = strchr (constraint, '+')) != NULL)
	;
      else
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      if (p != constraint)
	{
	  j = *p;
	  bcopy (constraint, constraint+1, p-constraint);
	  *constraint = j;

	  warning ("output constraint `%c' for operand %d is not at the beginning", j, i);
	}

      is_inout = constraint[0] == '+';
      /* Replace '+' with '='.  */
      constraint[0] = '=';
      /* Make sure we can specify the matching operand.  */
      if (is_inout && i > 9)
	{
	  error ("output operand constraint %d contains `+'", i);
	  return;
	}

      for (j = 1; j < c_len; j++)
	switch (constraint[j])
	  {
	  case '+':
	  case '=':
	    error ("operand constraint contains '+' or '=' at illegal position.");
	    return;

	  case '%':
	    if (i + 1 == ninputs + noutputs)
	      {
		error ("`%%' constraint used with last operand");
		return;
	      }
	    break;

	  case '?':  case '!':  case '*':  case '&':
	  case 'E':  case 'F':  case 'G':  case 'H':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    error ("matching constraint not valid in output operand");
	    break;

	  case 'V':  case 'm':  case 'o':
	    allows_mem = 1;
	    break;

	  case '<':  case '>':
	    /* ??? Before flow, auto inc/dec insns are not supposed to exist,
	       excepting those that expand_call created.  So match memory
	       and hope.  */
	    allows_mem = 1;
	    break;

	  case 'g':  case 'X':
	    allows_reg = 1;
	    allows_mem = 1;
	    break;

	  case 'p':  case 'r':
	  default:
	    allows_reg = 1;
	    break;
	  }

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
	   && allows_mem)
	  || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
	      && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || is_inout)
	{
	  if (! allows_reg)
	    mark_addressable (TREE_VALUE (tail));

	  output_rtx[i]
	    = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
			   EXPAND_MEMORY_USE_WO);

	  if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
	    error ("output number %d not directly addressable", i);
	  if (! allows_mem && GET_CODE (output_rtx[i]) == MEM)
	    {
	      real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
	      output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
	      if (is_inout)
		emit_move_insn (output_rtx[i], real_output_rtx[i]);
	    }
	}
      else
	{
	  output_rtx[i] = assign_temp (type, 0, 0, 0);
	  TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
	}

      if (is_inout)
	{
	  inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
	  inout_opnum[ninout++] = i;
	}
    }
  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS (VOIDmode, TREE_STRING_POINTER (string),
			       empty_string, 0, argvec, constraints,
			       filename, line);

  MEM_VOLATILE_P (body) = vol;
  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;
      int allows_reg = 0, allows_mem = 0;
      char *constraint, *orig_constraint;
      int c_len;
      rtx op;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;

      /* ??? Can this happen, and does the error message make any sense? */
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)) );
	  return;
	}

      c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
      orig_constraint = constraint;

      /* Make sure constraint has neither `=', `+', nor '&'.  */

      for (j = 0; j < c_len; j++)
	switch (constraint[j])
	  {
	  case '+':  case '=':  case '&':
	    if (constraint == orig_constraint)
	      {
		error ("input operand constraint contains `%c'", constraint[j]);
		return;
	      }
	    break;

	  case '%':
	    if (constraint == orig_constraint
		&& i + 1 == ninputs - ninout)
	      {
		error ("`%%' constraint used with last operand");
		return;
	      }
	    break;

	  case 'V':  case 'm':  case 'o':
	    allows_mem = 1;
	    break;

	  case '<':  case '>':
	  case '?':  case '!':  case '*':
	  case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	    /* Whether or not a numeric constraint allows a register is
	       decided by the matching constraint, and so there is no need
	       to do anything special with them.  We must handle them in
	       the default case, so that we don't unnecessarily force
	       operands to memory.  */
	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    if (constraint[j] >= '0' + noutputs)
	      {
		error
		  ("matching constraint references invalid operand number");
		return;
	      }

	    /* Try and find the real constraint for this dup.  */
	    if ((j == 0 && c_len == 1)
		|| (j == 1 && c_len == 2 && constraint[0] == '%'))
	      {
		tree o = outputs;

		for (j = constraint[j] - '0'; j > 0; --j)
		  o = TREE_CHAIN (o);

		c_len = TREE_STRING_LENGTH (TREE_PURPOSE (o)) - 1;
		constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
		j = 0;
		break;
	      }

	    /* ... fall through ...  */

	  case 'p':  case 'r':
	  default:
	    allows_reg = 1;
	    break;

	  case 'g':
	    allows_reg = 1;
	    allows_mem = 1;
	    break;
	  }

      if (! allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (tail));

      op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);

      if (asm_operand_ok (op, constraint) <= 0)
	{
	  if (allows_reg)
	    op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
	  else if (!allows_mem)
	    warning ("asm operand %d probably doesn't match constraints", i);
	  else if (CONSTANT_P (op))
	    op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
				  op);
	  else if (GET_CODE (op) == REG
		   || GET_CODE (op) == SUBREG
		   || GET_CODE (op) == CONCAT)
	    {
	      tree type = TREE_TYPE (TREE_VALUE (tail));
	      rtx memloc = assign_temp (type, 1, 1, 1);

	      emit_move_insn (memloc, op);
	      op = memloc;
	    }
	  else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
	    /* We won't recognize volatile memory as available as a
	       memory_operand at this point.  Ignore it.  */
	    ;
	  else if (queued_subexp_p (op))
	    ;
	  else
	    /* ??? Leave this only until we have experience with what
	       happens in combine and elsewhere when constraints are
	       not satisfied.  */
	    warning ("asm operand %d probably doesn't match constraints", i);
	}

      XVECEXP (body, 3, i) = op;

      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
			     orig_constraint);
      i++;
    }
  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs - ninout; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      int j = inout_opnum[i];

      XVECEXP (body, 3, ninputs - ninout + i)      /* argvec */
	= output_rtx[j];
      XVECEXP (body, 4, ninputs - ninout + i)      /* constraints */
	= gen_rtx_ASM_INPUT (inout_mode[i], digit_strings[j]);
    }
  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;

      if (num == 0) num = 1;

      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx_SET (VOIDmode,
			   output_rtx[i],
			   gen_rtx_ASM_OPERANDS
			   (GET_MODE (output_rtx[i]),
			    TREE_STRING_POINTER (string),
			    TREE_STRING_POINTER (TREE_PURPOSE (tail)),
			    i, argvec, constraints,
			    filename, line));

	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j == -3)	/* `cc', which is not a register */
	    continue;

	  if (j == -4)	/* `memory', don't cache memory across asm */
	    {
	      XVECEXP (body, 0, i++)
		= gen_rtx_CLOBBER (VOIDmode,
				   gen_rtx_MEM
				   (BLKmode,
				    gen_rtx_SCRATCH (VOIDmode)));
	      continue;
	    }

	  /* Ignore unknown register, error already signaled.  */
	  if (j < 0)
	    continue;

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
	}

      insn = emit_insn (body);
    }

  /* For any outputs that needed reloading into registers, spill them
     back to where they belong.  */
  for (i = 0; i < noutputs; ++i)
    if (real_output_rtx[i])
      emit_move_insn (real_output_rtx[i], output_rtx[i]);

  free_temp_slots ();
}
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) constr.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }

  /* If EXP is of function type and we are expanding statements for
     value, convert it to pointer-to-function.  */
  if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
    exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);

  last_expr_type = TREE_TYPE (exp);
  last_expr_value = expand_expr (exp,
				 (expr_stmts_for_value
				  ? NULL_RTX : const0_rtx),
				 VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
	;
      else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
				   expand_expr (TYPE_SIZE (last_expr_type),
						NULL_RTX, VOIDmode, 0),
				   BLKmode, 0,
				   TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT,
				   lab);
	  emit_label (lab);
	}
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}
1891 Return 1 if a warning is printed; 0 otherwise. */
1894 warn_if_unused_value (exp
)
1897 if (TREE_USED (exp
))
1900 switch (TREE_CODE (exp
))
1902 case PREINCREMENT_EXPR
:
1903 case POSTINCREMENT_EXPR
:
1904 case PREDECREMENT_EXPR
:
1905 case POSTDECREMENT_EXPR
:
1910 case METHOD_CALL_EXPR
:
1912 case TRY_CATCH_EXPR
:
1913 case WITH_CLEANUP_EXPR
:
1915 /* We don't warn about COND_EXPR because it may be a useful
1916 construct if either arm contains a side effect. */
1921 /* For a binding, warn if no side effect within it. */
1922 return warn_if_unused_value (TREE_OPERAND (exp
, 1));
1925 return warn_if_unused_value (TREE_OPERAND (exp
, 1));
1927 case TRUTH_ORIF_EXPR
:
1928 case TRUTH_ANDIF_EXPR
:
1929 /* In && or ||, warn if 2nd operand has no side effect. */
1930 return warn_if_unused_value (TREE_OPERAND (exp
, 1));
1933 if (TREE_NO_UNUSED_WARNING (exp
))
1935 if (warn_if_unused_value (TREE_OPERAND (exp
, 0)))
1937 /* Let people do `(foo (), 0)' without a warning. */
1938 if (TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
1940 return warn_if_unused_value (TREE_OPERAND (exp
, 1));
1944 case NON_LVALUE_EXPR
:
1945 /* Don't warn about values cast to void. */
1946 if (TREE_TYPE (exp
) == void_type_node
)
1948 /* Don't warn about conversions not explicit in the user's program. */
1949 if (TREE_NO_UNUSED_WARNING (exp
))
1951 /* Assignment to a cast usually results in a cast of a modify.
1952 Don't complain about that. There can be an arbitrary number of
1953 casts before the modify, so we must loop until we find the first
1954 non-cast expression and then test to see if that is a modify. */
1956 tree tem
= TREE_OPERAND (exp
, 0);
1958 while (TREE_CODE (tem
) == CONVERT_EXPR
|| TREE_CODE (tem
) == NOP_EXPR
)
1959 tem
= TREE_OPERAND (tem
, 0);
1961 if (TREE_CODE (tem
) == MODIFY_EXPR
|| TREE_CODE (tem
) == INIT_EXPR
1962 || TREE_CODE (tem
) == CALL_EXPR
)
1968 /* Don't warn about automatic dereferencing of references, since
1969 the user cannot control it. */
1970 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == REFERENCE_TYPE
)
1971 return warn_if_unused_value (TREE_OPERAND (exp
, 0));
1972 /* ... fall through ... */
1975 /* Referencing a volatile value is a side effect, so don't warn. */
1976 if ((TREE_CODE_CLASS (TREE_CODE (exp
)) == 'd'
1977 || TREE_CODE_CLASS (TREE_CODE (exp
)) == 'r')
1978 && TREE_THIS_VOLATILE (exp
))
1981 warning_with_file_and_line (emit_filename
, emit_lineno
,
1982 "value computed is not used");
/* Clear out the memory of the last expression evaluated.  */

void
clear_last_expr ()
{
  last_expr_type = 0;
}
/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */
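/* For illustration only: this protocol implements the GNU C statement
   expression extension, e.g.

	int x = ({ int t = f (); t + 1; });

   The front end calls expand_start_stmt_expr before expanding the
   body, then passes the returned RTL_EXPR to expand_end_stmt_expr to
   obtain the value of the last expr-stmt.  */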
tree
expand_start_stmt_expr ()
{
  int momentary;
  tree t;

  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  momentary = suspend_momentary ();
  t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  do_pending_stack_adjust ();
  start_sequence_for_rtl_expr (t);
  NO_DEFER_POP;
  expr_stmts_for_value++;
  return t;
}
/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate a RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */

tree
expand_end_stmt_expr (t)
     tree t;
{
  OK_DEFER_POP;

  if (last_expr_type == 0)
    {
      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;
    }
  else if (last_expr_value == 0)
    /* There are some cases where this can happen, such as when the
       statement is void type.  */
    last_expr_value = const0_rtx;
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  emit_queue ();

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  end_sequence ();

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  last_expr_type = 0;
  expr_stmts_for_value--;

  return t;
}
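/* Illustrative sketch (not called anywhere in the compiler): how a front
   end would typically bracket the expansion of a GNU statement-expression
   `({ stmt; ... value; })'.  The helper expand_inner_statements is a
   hypothetical stand-in for the parser's per-statement expansion.  */
#if 0
static rtx
example_expand_stmt_expr ()
{
  tree t = expand_start_stmt_expr ();	/* start collecting insns */
  expand_inner_statements ();		/* hypothetical: expand_expr_stmt etc. */
  t = expand_end_stmt_expr (t);		/* package type, value, and insns */
  /* The RTL_EXPR is later evaluated like any other expression.  */
  return expand_expr (t, NULL_RTX, VOIDmode, 0);
}
#endif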
/* Generate RTL for the start of an if-then.  COND is the expression
   whose truth should be tested.

   If EXITFLAG is nonzero, this conditional is visible to
   `exit_something'.  */

void
expand_start_cond (cond, exitflag)
     tree cond;
     int exitflag;
{
  struct nesting *thiscond = ALLOC_NESTING ();

  /* Make an entry on cond_stack for the cond we are entering.  */

  thiscond->next = cond_stack;
  thiscond->all = nesting_stack;
  thiscond->depth = ++nesting_depth;
  thiscond->data.cond.next_label = gen_label_rtx ();
  /* Before we encounter an `else', we don't need a separate exit label
     unless there are supposed to be exit statements
     to exit this conditional.  */
  thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
  thiscond->data.cond.endif_label = thiscond->exit_label;
  cond_stack = thiscond;
  nesting_stack = thiscond;

  do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
}
/* Generate RTL between then-clause and the elseif-clause
   of an if-then-elseif-....  */

void
expand_start_elseif (cond)
     tree cond;
{
  if (cond_stack->data.cond.endif_label == 0)
    cond_stack->data.cond.endif_label = gen_label_rtx ();
  emit_jump (cond_stack->data.cond.endif_label);
  emit_label (cond_stack->data.cond.next_label);
  cond_stack->data.cond.next_label = gen_label_rtx ();
  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
}
/* Generate RTL between the then-clause and the else-clause
   of an if-then-else.  */

void
expand_start_else ()
{
  if (cond_stack->data.cond.endif_label == 0)
    cond_stack->data.cond.endif_label = gen_label_rtx ();

  emit_jump (cond_stack->data.cond.endif_label);
  emit_label (cond_stack->data.cond.next_label);
  cond_stack->data.cond.next_label = 0;  /* No more _else or _elseif calls.  */
}
/* After calling expand_start_else, turn this "else" into an "else if"
   by providing another condition.  */

void
expand_elseif (cond)
     tree cond;
{
  cond_stack->data.cond.next_label = gen_label_rtx ();
  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
}
/* Generate RTL for the end of an if-then.
   Pop the record for it off of cond_stack.  */

void
expand_end_cond ()
{
  struct nesting *thiscond = cond_stack;

  do_pending_stack_adjust ();
  if (thiscond->data.cond.next_label)
    emit_label (thiscond->data.cond.next_label);
  if (thiscond->data.cond.endif_label)
    emit_label (thiscond->data.cond.endif_label);

  POPSTACK (cond_stack);
  last_expr_type = 0;
}
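/* Illustrative sketch (not called anywhere in the compiler): expanding
   `if (cond) { then-body } else { else-body }' with the functions above.
   expand_then_body and expand_else_body are hypothetical stand-ins for
   the parser's statement expansion.  */
#if 0
static void
example_expand_if_else (cond)
     tree cond;
{
  expand_start_cond (cond, 0);	/* emits jump to next_label if COND is false */
  expand_then_body ();		/* hypothetical */
  expand_start_else ();		/* jump to endif; drop the next_label here */
  expand_else_body ();		/* hypothetical */
  expand_end_cond ();		/* emit pending labels, pop cond_stack */
}
#endif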
/* Generate RTL for the start of a loop.  EXIT_FLAG is nonzero if this
   loop should be exited by `exit_something'.  This is a loop for which
   `expand_continue' will jump to the top of the loop.

   Make an entry on loop_stack to record the labels associated with
   this loop.  */

struct nesting *
expand_start_loop (exit_flag)
     int exit_flag;
{
  register struct nesting *thisloop = ALLOC_NESTING ();

  /* Make an entry on loop_stack for the loop we are entering.  */

  thisloop->next = loop_stack;
  thisloop->all = nesting_stack;
  thisloop->depth = ++nesting_depth;
  thisloop->data.loop.start_label = gen_label_rtx ();
  thisloop->data.loop.end_label = gen_label_rtx ();
  thisloop->data.loop.alt_end_label = 0;
  thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
  thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
  loop_stack = thisloop;
  nesting_stack = thisloop;

  do_pending_stack_adjust ();
  emit_queue ();
  emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
  emit_label (thisloop->data.loop.start_label);

  return thisloop;
}
/* Like expand_start_loop but for a loop where the continuation point
   (for expand_continue_loop) will be specified explicitly.  */

struct nesting *
expand_start_loop_continue_elsewhere (exit_flag)
     int exit_flag;
{
  struct nesting *thisloop = expand_start_loop (exit_flag);
  loop_stack->data.loop.continue_label = gen_label_rtx ();
  return thisloop;
}
/* Specify the continuation point for a loop started with
   expand_start_loop_continue_elsewhere.
   Use this at the point in the code to which a continue statement
   should jump.  */

void
expand_loop_continue_here ()
{
  do_pending_stack_adjust ();
  emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
  emit_label (loop_stack->data.loop.continue_label);
}
/* Finish a loop.  Generate a jump back to the top and the loop-exit label.
   Pop the block off of loop_stack.  */

void
expand_end_loop ()
{
  rtx start_label = loop_stack->data.loop.start_label;
  rtx insn = get_last_insn ();
  int needs_end_jump = 1;

  /* Mark the continue-point at the top of the loop if none elsewhere.  */
  if (start_label == loop_stack->data.loop.continue_label)
    emit_note_before (NOTE_INSN_LOOP_CONT, start_label);

  do_pending_stack_adjust ();

  /* If optimizing, perhaps reorder the loop.
     First, try to use a condjump near the end.
     expand_exit_loop_if_false ends loops with unconditional jumps,
     like this:

     if (test) goto label;
     optional: cleanup
     goto loop_stack->data.loop.end_label
     barrier
     label:

     If we find such a pattern, we can end the loop earlier.  */

  if (optimize
      && GET_CODE (insn) == CODE_LABEL
      && LABEL_NAME (insn) == NULL
      && GET_CODE (PREV_INSN (insn)) == BARRIER)
    {
      rtx label = insn;
      rtx jump = PREV_INSN (PREV_INSN (label));

      if (GET_CODE (jump) == JUMP_INSN
	  && GET_CODE (PATTERN (jump)) == SET
	  && SET_DEST (PATTERN (jump)) == pc_rtx
	  && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
	  && (XEXP (SET_SRC (PATTERN (jump)), 0)
	      == loop_stack->data.loop.end_label))
	{
	  rtx prev;

	  /* The test might be complex and reference LABEL multiple times,
	     like the loop in loop_iterations to set vtop.  To handle this,
	     we move LABEL.  */
	  insn = PREV_INSN (label);
	  reorder_insns (label, label, start_label);

	  for (prev = PREV_INSN (jump); ; prev = PREV_INSN (prev))
	    {
	      /* We ignore line number notes, but if we see any other note,
		 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
		 NOTE_INSN_LOOP_*, we disable this optimization.  */
	      if (GET_CODE (prev) == NOTE)
		{
		  if (NOTE_LINE_NUMBER (prev) < 0)
		    break;
		  continue;
		}
	      if (GET_CODE (prev) == CODE_LABEL)
		break;
	      if (GET_CODE (prev) == JUMP_INSN)
		{
		  if (GET_CODE (PATTERN (prev)) == SET
		      && SET_DEST (PATTERN (prev)) == pc_rtx
		      && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
		      && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
			  == LABEL_REF)
		      && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
		    {
		      XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
			= start_label;
		      emit_note_after (NOTE_INSN_LOOP_END, prev);
		      needs_end_jump = 0;
		    }
		  break;
		}
	    }
	}
    }

  /* If the loop starts with a loop exit, roll that to the end where
     it will optimize together with the jump back.

     We look for the conditional branch to the exit, except that once
     we find such a branch, we don't look past 30 instructions.

     In more detail, if the loop presently looks like this (in pseudo-C):

	 start_label:
	 if (test) goto end_label;
	 body;
	 goto start_label;
	 end_label:

     transform it to look like:

	 goto start_label;
	 newstart_label:
	 body;
	 start_label:
	 if (test) goto end_label;
	 goto newstart_label;
	 end_label:

     Here, the `test' may actually consist of some reasonably complex
     code, terminating in a test.  */

  if (optimize
      && needs_end_jump
      &&
      ! (GET_CODE (insn) == JUMP_INSN
	 && GET_CODE (PATTERN (insn)) == SET
	 && SET_DEST (PATTERN (insn)) == pc_rtx
	 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
    {
      int eh_regions = 0;
      int num_insns = 0;
      rtx last_test_insn = NULL_RTX;

      /* Scan insns from the top of the loop looking for a qualified
	 conditional exit.  */
      for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == NOTE)
	    {
	      if (optimize < 2
		  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
		      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
		/* The code that actually moves the exit test will
		   carefully leave BLOCK notes in their original
		   location.  That means, however, that we can't debug
		   the exit test itself.  So, we refuse to move code
		   containing BLOCK notes at low optimization levels.  */
		break;

	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
		++eh_regions;
	      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
		{
		  --eh_regions;
		  if (eh_regions < 0)
		    /* We've come to the end of an EH region, but
		       never saw the beginning of that region.  That
		       means that an EH region begins before the top
		       of the loop, and ends in the middle of it.  The
		       existence of such a situation violates a basic
		       assumption in this code, since that would imply
		       that even when EH_REGIONS is zero, we might
		       move code out of an exception region.  */
		    abort ();
		}

	      /* We must not walk into a nested loop.  */
	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
		break;

	      /* We already know this INSN is a NOTE, so there's no
		 point in looking at it to see if it's a JUMP.  */
	      continue;
	    }

	  if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
	    num_insns++;

	  if (last_test_insn && num_insns > 30)
	    break;

	  if (eh_regions > 0)
	    /* We don't want to move a partial EH region.  Consider:

		  while ( ( { try {
				if (cond ()) 0;
				else {
				  bar ();
				  1;
				}
			      } catch (...) {
				1;
			      } )) {
		     body;
		  }

	       This isn't legal C++, but here's what it's supposed to
	       mean: if cond() is true, stop looping.  Otherwise,
	       call bar, and keep looping.  In addition, if cond
	       throws an exception, catch it and keep looping.  Such
	       constructs are certainly legal in LISP.

	       We should not move the `if (cond()) 0' test since then
	       the EH-region for the try-block would be broken up.
	       (In this case we would move the EH_BEG note for the `try'
	       and `if cond()' but not the call to bar() or the
	       EH_END note.)

	       So we don't look for tests within an EH region.  */
	    continue;

	  if (GET_CODE (insn) == JUMP_INSN
	      && GET_CODE (PATTERN (insn)) == SET
	      && SET_DEST (PATTERN (insn)) == pc_rtx)
	    {
	      /* This is indeed a jump.  */
	      rtx dest1 = NULL_RTX;
	      rtx dest2 = NULL_RTX;
	      rtx potential_last_test;
	      if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
		{
		  /* A conditional jump.  */
		  dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
		  dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
		  potential_last_test = insn;
		}
	      else
		{
		  /* An unconditional jump.  */
		  dest1 = SET_SRC (PATTERN (insn));
		  /* Include the BARRIER after the JUMP.  */
		  potential_last_test = NEXT_INSN (insn);
		}

	      do {
		if (dest1 && GET_CODE (dest1) == LABEL_REF
		    && ((XEXP (dest1, 0)
			 == loop_stack->data.loop.alt_end_label)
			|| (XEXP (dest1, 0)
			    == loop_stack->data.loop.end_label)))
		  {
		    last_test_insn = potential_last_test;
		    break;
		  }

		/* If this was a conditional jump, there may be
		   another label at which we should look.  */
		dest1 = dest2;
		dest2 = NULL_RTX;
	      } while (dest1);
	    }
	}

      if (last_test_insn != 0 && last_test_insn != get_last_insn ())
	{
	  /* We found one.  Move everything from there up
	     to the end of the loop, and add a jump into the loop
	     to jump to there.  */
	  register rtx newstart_label = gen_label_rtx ();
	  register rtx start_move = start_label;
	  rtx next_insn;

	  /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
	     then we want to move this note also.  */
	  if (GET_CODE (PREV_INSN (start_move)) == NOTE
	      && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
		  == NOTE_INSN_LOOP_CONT))
	    start_move = PREV_INSN (start_move);

	  emit_label_after (newstart_label, PREV_INSN (start_move));

	  /* Actually move the insns.  Start at the beginning, and
	     keep copying insns until we've copied the
	     last_test_insn.  */
	  for (insn = start_move; insn; insn = next_insn)
	    {
	      /* Figure out which insn comes after this one.  We have
		 to do this before we move INSN.  */
	      if (insn == last_test_insn)
		/* We've moved all the insns.  */
		next_insn = NULL_RTX;
	      else
		next_insn = NEXT_INSN (insn);

	      if (GET_CODE (insn) == NOTE
		  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
		      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
		/* We don't want to move NOTE_INSN_BLOCK_BEGs or
		   NOTE_INSN_BLOCK_ENDs because the correct generation
		   of debugging information depends on these appearing
		   in the same order in the RTL and in the tree
		   structure, where they are represented as BLOCKs.
		   So, we don't move block notes.  Of course, moving
		   the code inside the block is likely to make it
		   impossible to debug the instructions in the exit
		   test, but such is the price of optimization.  */
		continue;

	      /* Move the INSN.  */
	      reorder_insns (insn, insn, get_last_insn ());
	    }

	  emit_jump_insn_after (gen_jump (start_label),
				PREV_INSN (newstart_label));
	  emit_barrier_after (PREV_INSN (newstart_label));
	  start_label = newstart_label;
	}
    }

  if (needs_end_jump)
    {
      emit_jump (start_label);
      emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
    }
  emit_label (loop_stack->data.loop.end_label);

  POPSTACK (loop_stack);

  last_expr_type = 0;
}
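/* Illustrative sketch (not called anywhere in the compiler): how a front
   end typically drives the functions above for `do body while (cond);'.
   Since `continue' must jump to the test, the continue point is placed
   explicitly.  expand_loop_body is a hypothetical stand-in for expanding
   the loop body.  */
#if 0
static void
example_expand_do_while (cond)
     tree cond;
{
  expand_start_loop_continue_elsewhere (1);	/* visible to `break' */
  expand_loop_body ();				/* hypothetical */
  expand_loop_continue_here ();			/* `continue' lands here */
  expand_exit_loop_if_false (0, cond);		/* leave loop when COND is 0 */
  expand_end_loop ();				/* jump back; emit exit label */
}
#endif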
/* Generate a jump to the current loop's continue-point.
   This is usually the top of the loop, but may be specified
   explicitly elsewhere.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.  */

int
expand_continue_loop (whichloop)
     struct nesting *whichloop;
{
  last_expr_type = 0;
  if (whichloop == 0)
    whichloop = loop_stack;
  if (whichloop == 0)
    return 0;
  expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
			NULL_RTX);
  return 1;
}
/* Generate a jump to exit the current loop.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.  */

int
expand_exit_loop (whichloop)
     struct nesting *whichloop;
{
  last_expr_type = 0;
  if (whichloop == 0)
    whichloop = loop_stack;
  if (whichloop == 0)
    return 0;
  expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
  return 1;
}
/* Generate a conditional jump to exit the current loop if COND
   evaluates to zero.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.  */

int
expand_exit_loop_if_false (whichloop, cond)
     struct nesting *whichloop;
     tree cond;
{
  rtx label = gen_label_rtx ();
  rtx last_insn;
  last_expr_type = 0;

  if (whichloop == 0)
    whichloop = loop_stack;
  if (whichloop == 0)
    return 0;
  /* In order to handle fixups, we actually create a conditional jump
     around an unconditional branch to exit the loop.  If fixups are
     necessary, they go before the unconditional branch.  */

  do_jump (cond, NULL_RTX, label);
  last_insn = get_last_insn ();
  if (GET_CODE (last_insn) == CODE_LABEL)
    whichloop->data.loop.alt_end_label = last_insn;
  expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
			NULL_RTX);
  emit_label (label);

  return 1;
}
/* Return nonzero if the loop nest is empty.  Else return zero.  */

int
stmt_loop_nest_empty ()
{
  return (loop_stack == NULL);
}
/* Return non-zero if we should preserve sub-expressions as separate
   pseudos.  We never do so if we aren't optimizing.  We always do so
   if -fexpensive-optimizations.

   Otherwise, we only do so if we are in the "early" part of a loop.  I.e.,
   the loop may still be a small one.  */

int
preserve_subexpressions_p ()
{
  rtx insn;

  if (flag_expensive_optimizations)
    return 1;

  if (optimize == 0 || current_function == 0
      || current_function->stmt == 0 || loop_stack == 0)
    return 0;

  insn = get_last_insn_anywhere ();

  return (insn
	  && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
	      < n_non_fixed_regs * 3));
}
/* Generate a jump to exit the current loop, conditional, binding contour
   or case statement.  Not all such constructs are visible to this function,
   only those started with EXIT_FLAG nonzero.  Individual languages use
   the EXIT_FLAG parameter to control which kinds of constructs you can
   exit this way.

   If not currently inside anything that can be exited,
   return 0 and do nothing; caller will print an error message.  */

int
expand_exit_something ()
{
  struct nesting *n;
  last_expr_type = 0;
  for (n = nesting_stack; n; n = n->all)
    if (n->exit_label != 0)
      {
	expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
	return 1;
      }

  return 0;
}
/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return ()
{
  struct nesting *block = block_stack;
  rtx last_insn = 0;

  /* Does any pending block have cleanups?  */

  while (block && block->data.block.cleanups == 0)
    block = block->next;

  /* If yes, use a goto to return, since that runs cleanups.  */

  expand_null_return_1 (last_insn, block != 0);
}
/* Generate RTL to return from the current function, with value VAL.  */

static void
expand_value_return (val)
     rtx val;
{
  struct nesting *block = block_stack;
  rtx last_insn = get_last_insn ();
  rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));

  /* Copy the value to the return location
     unless it's already there.  */

  if (return_reg != val)
    {
#ifdef PROMOTE_FUNCTION_RETURN
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      int unsignedp = TREE_UNSIGNED (type);
      enum machine_mode mode
	= promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
			&unsignedp, 1);

      if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
	convert_move (return_reg, val, unsignedp);
      else
#endif
	emit_move_insn (return_reg, val);
    }
  if (GET_CODE (return_reg) == REG
      && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
    emit_insn (gen_rtx_USE (VOIDmode, return_reg));
  /* Handle calls that return values in multiple non-contiguous locations.
     The Irix 6 ABI has examples of this.  */
  else if (GET_CODE (return_reg) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (return_reg, 0); i++)
	{
	  rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);

	  if (GET_CODE (x) == REG
	      && REGNO (x) < FIRST_PSEUDO_REGISTER)
	    emit_insn (gen_rtx_USE (VOIDmode, x));
	}
    }

  /* Does any pending block have cleanups?  */

  while (block && block->data.block.cleanups == 0)
    block = block->next;

  /* If yes, use a goto to return, since that runs cleanups.
     Use LAST_INSN to put cleanups *before* the move insn emitted above.  */

  expand_null_return_1 (last_insn, block != 0);
}
/* Output a return with no value.  If LAST_INSN is nonzero,
   pretend that the return takes place after LAST_INSN.
   If USE_GOTO is nonzero then don't use a return instruction;
   go to the return label instead.  This causes any cleanups
   of pending blocks to be executed normally.  */

static void
expand_null_return_1 (last_insn, use_goto)
     rtx last_insn;
     int use_goto;
{
  rtx end_label = cleanup_label ? cleanup_label : return_label;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  last_expr_type = 0;

  /* PCC-struct return always uses an epilogue.  */
  if (current_function_returns_pcc_struct || use_goto)
    {
      if (end_label == 0)
	end_label = return_label = gen_label_rtx ();
      expand_goto_internal (NULL_TREE, end_label, last_insn);
      return;
    }

  /* Otherwise output a simple return-insn if one is available,
     unless it won't do the job.  */
#ifdef HAVE_return
  if (HAVE_return && use_goto == 0 && cleanup_label == 0)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
      return;
    }
#endif

  /* Otherwise jump to the epilogue.  */
  expand_goto_internal (NULL_TREE, end_label, last_insn);
}
/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.  */

void
expand_return (retval)
     tree retval;
{
  /* If there are any cleanups to be performed, then they will
     be inserted following LAST_INSN.  It is desirable
     that the last_insn, for such purposes, should be the
     last insn before computing the return value.  Otherwise, cleanups
     which call functions can clobber the return value.  */
  /* ??? rms: I think that is erroneous, because in C++ it would
     run destructors on variables that might be used in the subsequent
     computation of the return value.  */
  rtx last_insn = 0;
  register rtx val = 0;
  register rtx op0;
  tree retval_rhs;
  int cleanups;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      expand_expr (retval, NULL_RTX, VOIDmode, 0);
      emit_queue ();
      expand_null_return ();
      return;
    }

  /* Are any cleanups needed?  E.g. C++ destructors to be run?  */
  /* This is not sufficient.  We also need to watch for cleanups of the
     expression we are about to expand.  Unfortunately, we cannot know
     if it has cleanups until we expand it, and we want to change how we
     expand it depending upon if we need cleanups.  We can't win.  */
  cleanups = any_pending_cleanups (1);

  if (TREE_CODE (retval) == RESULT_DECL)
    retval_rhs = retval;
  else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    retval_rhs = TREE_OPERAND (retval, 1);
  else if (TREE_TYPE (retval) == void_type_node)
    /* Recognize tail-recursive call to void function.  */
    retval_rhs = retval;
  else
    retval_rhs = NULL_TREE;

  /* Only use `last_insn' if there are cleanups which must be run.  */
  if (cleanups || cleanup_label != 0)
    last_insn = get_last_insn ();

  /* Distribute return down conditional expr if either of the sides
     may involve tail recursion (see test below).  This enhances the number
     of tail recursions we see.  Don't do this always since it can produce
     sub-optimal code in some cases and we distribute assignments into
     conditional expressions when it would help.  */

  if (optimize && retval_rhs != 0
      && frame_offset == 0
      && TREE_CODE (retval_rhs) == COND_EXPR
      && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
	  || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
    {
      rtx label = gen_label_rtx ();
      tree expr;

      do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
      start_cleanup_deferral ();
      expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
		    DECL_RESULT (current_function_decl),
		    TREE_OPERAND (retval_rhs, 1));
      TREE_SIDE_EFFECTS (expr) = 1;
      expand_return (expr);
      emit_label (label);

      expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
		    DECL_RESULT (current_function_decl),
		    TREE_OPERAND (retval_rhs, 2));
      TREE_SIDE_EFFECTS (expr) = 1;
      expand_return (expr);
      end_cleanup_deferral ();
      return;
    }

  /* Attempt to optimize the call if it is tail recursive.  */
  if (optimize_tail_recursion (retval_rhs, last_insn))
    return;

#ifdef HAVE_return
  /* This optimization is safe if there are local cleanups
     because expand_null_return takes care of them.
     ??? I think it should also be safe when there is a cleanup label,
     because expand_null_return takes care of them, too.
     Any reason why not?  */
  if (HAVE_return && cleanup_label == 0
      && ! current_function_returns_pcc_struct
      && BRANCH_COST <= 1)
    {
      /* If this is  return x == y;  then generate
	 if (x == y) return 1; else return 0;
	 if we can do it with explicit return insns and branches are cheap,
	 but not if we have the corresponding scc insn.  */
      if (retval_rhs)
	switch (TREE_CODE (retval_rhs))
	  {
	  /* The comparison codes (EQ_EXPR and the like) are handled the
	     same way when the target has no matching scc insn; those
	     cases fall through to the code below.  */

	  case TRUTH_ANDIF_EXPR:
	  case TRUTH_ORIF_EXPR:
	  case TRUTH_AND_EXPR:
	  case TRUTH_OR_EXPR:
	  case TRUTH_NOT_EXPR:
	  case TRUTH_XOR_EXPR:
	    op0 = gen_label_rtx ();
	    jumpifnot (retval_rhs, op0);
	    expand_value_return (const1_rtx);
	    emit_label (op0);
	    expand_value_return (const0_rtx);
	    return;

	  default:
	    break;
	  }
    }
#endif /* HAVE_return */

  /* If the result is an aggregate that is being returned in one (or more)
     registers, load the registers here.  The compiler currently can't handle
     copying a BLKmode value into registers.  We could put this code in a
     more general area (for use by everyone instead of just function
     call/return), but until this feature is generally usable it is kept here
     (and in expand_call).  The value must go into a pseudo in case there
     are cleanups that will clobber the real return register.  */

  if (retval_rhs != 0
      && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
    {
      int i, bitpos, xbitpos;
      int big_endian_correction = 0;
      int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
      int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
      int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),
			 (unsigned int) BITS_PER_WORD);
      rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
      rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
      rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
      enum machine_mode tmpmode, result_reg_mode;

      /* Structures whose size is not a multiple of a word are aligned
	 to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
	 machine, this means we must skip the empty high order bytes when
	 calculating the bit offset.  */
      if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
	big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
						  * BITS_PER_UNIT));

      /* Copy the structure BITSIZE bits at a time.  */
      for (bitpos = 0, xbitpos = big_endian_correction;
	   bitpos < bytes * BITS_PER_UNIT;
	   bitpos += bitsize, xbitpos += bitsize)
	{
	  /* We need a new destination pseudo each time xbitpos is
	     on a word boundary and when xbitpos == big_endian_correction
	     (the first time through).  */
	  if (xbitpos % BITS_PER_WORD == 0
	      || xbitpos == big_endian_correction)
	    {
	      /* Generate an appropriate register.  */
	      dst = gen_reg_rtx (word_mode);
	      result_pseudos[xbitpos / BITS_PER_WORD] = dst;

	      /* Clobber the destination before we move anything into it.  */
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
	    }

	  /* We need a new source operand each time bitpos is on a word
	     boundary.  */
	  if (bitpos % BITS_PER_WORD == 0)
	    src = operand_subword_force (result_val,
					 bitpos / BITS_PER_WORD,
					 BLKmode);

	  /* Use bitpos for the source extraction (left justified) and
	     xbitpos for the destination store (right justified).  */
	  store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
			   extract_bit_field (src, bitsize,
					      bitpos % BITS_PER_WORD, 1,
					      NULL_RTX, word_mode,
					      word_mode,
					      bitsize / BITS_PER_UNIT,
					      BITS_PER_WORD),
			   bitsize / BITS_PER_UNIT, BITS_PER_WORD);
	}

      /* Find the smallest integer mode large enough to hold the
	 entire structure and use that mode instead of BLKmode
	 on the USE insn for the return register.  */
      bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmpmode != VOIDmode;
	   tmpmode = GET_MODE_WIDER_MODE (tmpmode))
	{
	  /* Have we found a large enough mode?  */
	  if (GET_MODE_SIZE (tmpmode) >= bytes)
	    break;
	}

      /* No suitable mode found.  */
      if (tmpmode == VOIDmode)
	abort ();

      PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);

      if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
	result_reg_mode = word_mode;
      else
	result_reg_mode = tmpmode;
      result_reg = gen_reg_rtx (result_reg_mode);

      emit_queue ();
      for (i = 0; i < n_regs; i++)
	emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
			result_pseudos[i]);

      if (tmpmode != result_reg_mode)
	result_reg = gen_lowpart (tmpmode, result_reg);

      expand_value_return (result_reg);
    }
  else if (cleanups
	   && retval_rhs != 0
	   && TREE_TYPE (retval_rhs) != void_type_node
	   && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
    {
      /* Calculate the return value into a pseudo reg.  */
      val = gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl)));
      val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
      val = force_not_mem (val);
      emit_queue ();
      /* Return the calculated value, doing cleanups first.  */
      expand_value_return (val);
    }
  else
    {
      /* No cleanups or no hard reg used;
	 calculate value into hard return reg.  */
      expand_expr (retval, const0_rtx, VOIDmode, 0);
      emit_queue ();
      expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
    }
}
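/* Illustrative sketch (not called anywhere in the compiler): for a C
   `return expr;' the front end typically wraps EXPR in an assignment to
   the function's RESULT_DECL and hands that to expand_return, which is
   why the MODIFY_EXPR-of-RESULT_DECL shape is recognized above.  */
#if 0
static void
example_expand_c_return (expr)
     tree expr;
{
  tree res = DECL_RESULT (current_function_decl);
  tree set = build (MODIFY_EXPR, TREE_TYPE (res), res, expr);

  TREE_SIDE_EFFECTS (set) = 1;
  expand_return (set);
}
#endif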
/* Return 1 if the end of the generated RTX is not a barrier.
   This means code already compiled can drop through.  */

int
drop_through_at_end_p ()
{
  rtx insn = get_last_insn ();
  while (insn && GET_CODE (insn) == NOTE)
    insn = PREV_INSN (insn);
  return insn && GET_CODE (insn) != BARRIER;
}
/* Test CALL_EXPR to determine if it is a potential tail recursion call
   and emit code to optimize the tail recursion.  LAST_INSN indicates where
   to place the jump to the tail recursion label.  Return TRUE if the
   call was optimized into a goto.

   This is only used by expand_return, but expand_call is expected to
   use it soon.  */

int
optimize_tail_recursion (call_expr, last_insn)
     tree call_expr;
     rtx last_insn;
{
  /* For tail-recursive call to current function,
     just jump back to the beginning.
     It's unsafe if any auto variable in this function
     has its address taken; for simplicity,
     require stack frame to be empty.  */
  if (optimize && call_expr != 0
      && frame_offset == 0
      && TREE_CODE (call_expr) == CALL_EXPR
      && TREE_CODE (TREE_OPERAND (call_expr, 0)) == ADDR_EXPR
      && TREE_OPERAND (TREE_OPERAND (call_expr, 0), 0) == current_function_decl
      /* Finish checking validity, and if valid emit code
	 to set the argument variables for the new call.  */
      && tail_recursion_args (TREE_OPERAND (call_expr, 1),
			      DECL_ARGUMENTS (current_function_decl)))
    {
      if (tail_recursion_label == 0)
	{
	  tail_recursion_label = gen_label_rtx ();
	  emit_label_after (tail_recursion_label,
			    tail_recursion_reentry);
	}
      emit_queue ();
      expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
      emit_barrier ();
      return 1;
    }

  return 0;
}
/* Emit code to alter this function's formal parms for a tail-recursive call.
   ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
   FORMALS is the chain of decls of formals.
   Return 1 if this can be done;
   otherwise return 0 and do not emit any code.  */

static int
tail_recursion_args (actuals, formals)
     tree actuals, formals;
{
  register tree a = actuals, f = formals;
  register int i;
  register rtx *argvec;

  /* Check that number and types of actuals are compatible
     with the formals.  This is not always true in valid C code.
     Also check that no formal needs to be addressable
     and that all formals are scalars.  */

  /* Also count the args.  */

  for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
    {
      if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
	  != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
	return 0;
      if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
	return 0;
    }
  if (a != 0 || f != 0)
    return 0;

  /* Compute all the actuals.  */

  argvec = (rtx *) alloca (i * sizeof (rtx));

  for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
    argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);

  /* Find which actual values refer to current values of previous formals.
     Copy each of them now, before any formal is changed.  */

  for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
    {
      int copy = 0;
      register int j;
      for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
	if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
	  { copy = 1; break; }
      if (copy)
	argvec[i] = copy_to_reg (argvec[i]);
    }

  /* Store the values of the actuals into the formals.  */

  for (f = formals, a = actuals, i = 0; f;
       f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
    {
      if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
	emit_move_insn (DECL_RTL (f), argvec[i]);
      else
	convert_move (DECL_RTL (f), argvec[i],
		      TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
    }

  return 1;
}
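/* For illustration (a sketch, not part of the compiler): given

	int sum (int n, int acc) { return n ? sum (n - 1, acc + n) : acc; }

   the code above turns the recursive call into the equivalent of

	sum:	if (n == 0) return acc;
		acc = acc + n, n = n - 1;	-- tail_recursion_args
		goto sum;			-- tail_recursion_label

   The copy-before-store pass in tail_recursion_args is what keeps a
   call like `sum (acc, n)' from reading a formal that has already been
   overwritten with its new value.  */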
/* Generate the RTL code for entering a binding contour.
   The variables are declared one by one, by calls to `expand_decl'.

   FLAGS is a bitwise or of the following flags:

     1 - Nonzero if this construct should be visible to
	 `exit_something'.

     2 - Nonzero if this contour does not require a
	 NOTE_INSN_BLOCK_BEG note.  Virtually all calls from
	 language-independent code should set this flag because they
	 will not create corresponding BLOCK nodes.  (There should be
	 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
	 and BLOCKs.)  If this flag is set, MARK_ENDS should be zero
	 when expand_end_bindings is called.  */

void
expand_start_bindings (flags)
     int flags;
{
  struct nesting *thisblock = ALLOC_NESTING ();
  rtx note;
  int exit_flag = ((flags & 1) != 0);
  int block_flag = ((flags & 2) == 0);

  note = emit_note (NULL_PTR,
		    block_flag ? NOTE_INSN_BLOCK_BEG : NOTE_INSN_DELETED);

  /* Make an entry on block_stack for the block we are entering.  */

  thisblock->next = block_stack;
  thisblock->all = nesting_stack;
  thisblock->depth = ++nesting_depth;
  thisblock->data.block.stack_level = 0;
  thisblock->data.block.cleanups = 0;
  thisblock->data.block.n_function_calls = 0;
  thisblock->data.block.exception_region = 0;
  thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;

  thisblock->data.block.conditional_code = 0;
  thisblock->data.block.last_unconditional_cleanup = note;
  /* When we insert instructions after the last unconditional cleanup,
     we don't adjust last_insn.  That means that a later add_insn will
     clobber the instructions we've just added.  The easiest way to
     fix this is to just insert another instruction here, so that the
     instructions inserted after the last unconditional cleanup are
     never the last instruction.  */
  emit_note (NULL_PTR, NOTE_INSN_DELETED);
  thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;

  if (block_stack
      && !(block_stack->data.block.cleanups == NULL_TREE
	   && block_stack->data.block.outer_cleanups == NULL_TREE))
    thisblock->data.block.outer_cleanups
      = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
		   block_stack->data.block.outer_cleanups);
  else
    thisblock->data.block.outer_cleanups = 0;
  thisblock->data.block.label_chain = 0;
  thisblock->data.block.innermost_stack_block = stack_block_stack;
  thisblock->data.block.first_insn = note;
  thisblock->data.block.block_start_count = ++current_block_start_count;
  thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
  block_stack = thisblock;
  nesting_stack = thisblock;

  /* Make a new level for allocating stack slots.  */
  push_temp_slots ();
}
/* Specify the scope of temporaries created by TARGET_EXPRs.  Similar
   to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
   expand_expr are made.  After we end the region, we know that all
   space for all temporaries that were created by TARGET_EXPRs will be
   destroyed and their space freed for reuse.  */

void
expand_start_target_temps ()
{
  /* This is so that even if the result is preserved, the space
     allocated will be freed, as we know that it is no longer in use.  */
  push_temp_slots ();

  /* Start a new binding layer that will keep track of all cleanup
     actions to be performed.  */
  expand_start_bindings (2);

  target_temp_slot_level = temp_slot_level;
}

void
expand_end_target_temps ()
{
  expand_end_bindings (NULL_TREE, 0, 0);

  /* This is so that even if the result is preserved, the space
     allocated will be freed, as we know that it is no longer in use.  */
  pop_temp_slots ();
}
/* Mark top block of block_stack as an implicit binding for an
   exception region.  This is used to prevent infinite recursion when
   ending a binding with expand_end_bindings.  It is only ever called
   by expand_eh_region_start, as that is the only way to create a
   block stack for an exception region.  */

void
mark_block_as_eh_region ()
{
  block_stack->data.block.exception_region = 1;
  if (block_stack->next
      && block_stack->next->data.block.conditional_code)
    {
      block_stack->data.block.conditional_code
	= block_stack->next->data.block.conditional_code;
      block_stack->data.block.last_unconditional_cleanup
	= block_stack->next->data.block.last_unconditional_cleanup;
      block_stack->data.block.cleanup_ptr
	= block_stack->next->data.block.cleanup_ptr;
    }
}
/* True if we are currently emitting insns in an area of output code
   that is controlled by a conditional expression.  This is used by
   the cleanup handling code to generate conditional cleanup actions.  */

int
conditional_context ()
{
  return block_stack && block_stack->data.block.conditional_code;
}
/* Mark top block of block_stack as not for an implicit binding for an
   exception region.  This is only ever done by expand_eh_region_end
   to let expand_end_bindings know that it is being called explicitly
   to end the binding layer for just the binding layer associated with
   the exception region, otherwise expand_end_bindings would try and
   end all implicit binding layers for exception regions, and then
   one normal binding layer.  */

void
mark_block_as_not_eh_region ()
{
  block_stack->data.block.exception_region = 0;
}

/* True if the top block of block_stack was marked as for an exception
   region by mark_block_as_eh_region.  */

int
is_eh_region ()
{
  return (current_function && block_stack
	  && block_stack->data.block.exception_region);
}
/* Emit a handler label for a nonlocal goto handler.
   Also emit code to store the handler label in SLOT before BEFORE_INSN.  */

static rtx
expand_nl_handler_label (slot, before_insn)
     rtx slot, before_insn;
{
  rtx insns;
  rtx handler_label = gen_label_rtx ();

  /* Don't let jump_optimize delete the handler.  */
  LABEL_PRESERVE_P (handler_label) = 1;

  start_sequence ();
  emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
  insns = get_insns ();
  end_sequence ();
  emit_insns_before (insns, before_insn);

  emit_label (handler_label);

  return handler_label;
}
/* Emit code to restore vital registers at the beginning of a nonlocal goto
   handler.  */

static void
expand_nl_goto_receiver ()
{
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    /* First adjust our frame pointer to its actual value.  It was
       previously set to the start of the virtual area corresponding to
       the stacked variables when we branched here and now needs to be
       adjusted to the actual hardware fp value.

       Assignments to virtual registers are converted by
       instantiate_virtual_regs into the corresponding assignment
       to the underlying register (fp in this case) that makes
       the original assignment true.
       So the following insn will actually be
       decrementing fp by STARTING_FRAME_OFFSET.  */
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
      size_t i;

      for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == sizeof elim_regs / sizeof elim_regs[0])
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.
	     If there hasn't been space allocated for it yet, make
	     some now.  */
	  if (arg_pointer_save_area == 0)
	    arg_pointer_save_area
	      = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
	  emit_move_insn (virtual_incoming_args_rtx,
			  /* We need a pseudo here, or else
			     instantiate_virtual_regs_1 complains.  */
			  copy_to_reg (arg_pointer_save_area));
	}
    }
#endif

#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
#endif
}
/* Make handlers for nonlocal gotos taking place in the function calls in
   block THISBLOCK.  */

static void
expand_nl_goto_receivers (thisblock)
     struct nesting *thisblock;
{
  tree link;
  rtx insns;
  rtx afterward = gen_label_rtx ();
  rtx slot, label_list;
  int any_invalid = 0;

  /* Record the handler address in the stack slot for that purpose,
     during this block, saving and restoring the outer value.  */
  if (thisblock->next != 0)
    for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
      {
	rtx save_receiver = gen_reg_rtx (Pmode);
	emit_move_insn (XEXP (slot, 0), save_receiver);

	start_sequence ();
	emit_move_insn (save_receiver, XEXP (slot, 0));
	insns = get_insns ();
	end_sequence ();
	emit_insns_before (insns, thisblock->data.block.first_insn);
      }

  /* Jump around the handlers; they run only when specially invoked.  */
  emit_jump (afterward);

  /* Make a separate handler for each label.  */
  link = nonlocal_labels;
  slot = nonlocal_goto_handler_slots;
  label_list = NULL_RTX;
  for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
    /* Skip any labels we shouldn't be able to jump to from here,
       we generate one special handler for all of them below which just calls
       abort.  */
    if (! DECL_TOO_LATE (TREE_VALUE (link)))
      {
	rtx lab;
	lab = expand_nl_handler_label (XEXP (slot, 0),
				       thisblock->data.block.first_insn);
	label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);

	expand_nl_goto_receiver ();

	/* Jump to the "real" nonlocal label.  */
	expand_goto (TREE_VALUE (link));
      }

  /* A second pass over all nonlocal labels; this time we handle those
     we should not be able to jump to at this point.  */
  link = nonlocal_labels;
  slot = nonlocal_goto_handler_slots;
  for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
    if (DECL_TOO_LATE (TREE_VALUE (link)))
      {
	rtx lab;
	lab = expand_nl_handler_label (XEXP (slot, 0),
				       thisblock->data.block.first_insn);
	label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
	any_invalid = 1;
      }

  if (any_invalid)
    {
      expand_nl_goto_receiver ();
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
			 VOIDmode, 0);
      emit_barrier ();
    }

  nonlocal_goto_handler_labels = label_list;
  emit_label (afterward);
}
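/* For illustration (user-level GNU C, not compiler code): the receivers
   built above support nonlocal gotos out of nested functions, as in

	void outer ()
	{
	  __label__ failed;
	  void inner () { goto failed; }	-- nonlocal jump
	  walk (inner);		-- walk and recover are arbitrary user code
	  return;
	failed:
	  recover ();
	}

   A jump out of `inner' must restore OUTER's frame pointer (and
   possibly its argument pointer) before control reaches `failed',
   which is exactly what expand_nl_goto_receiver emits.  */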
/* Warn about any unused VARS (which may contain nodes other than
   VAR_DECLs, but such nodes are ignored).  The nodes are connected
   via the TREE_CHAIN field.  */

void
warn_about_unused_variables (vars)
     tree vars;
{
  tree decl;

  if (warn_unused)
    for (decl = vars; decl; decl = TREE_CHAIN (decl))
      if (TREE_CODE (decl) == VAR_DECL
	  && ! TREE_USED (decl)
	  && ! DECL_IN_SYSTEM_HEADER (decl)
	  && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
	warning_with_decl (decl, "unused variable `%s'");
}
/* Generate RTL code to terminate a binding contour.

   VARS is the chain of VAR_DECL nodes for the variables bound in this
   contour.  There may actually be other nodes in this chain, but any
   nodes other than VAR_DECLS are ignored.

   MARK_ENDS is nonzero if we should put a note at the beginning
   and end of this binding contour.

   DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
   (That is true automatically if the contour has a saved stack level.)  */

void
expand_end_bindings (vars, mark_ends, dont_jump_in)
     tree vars;
     int mark_ends;
     int dont_jump_in;
{
  register struct nesting *thisblock;
  register tree decl;

  while (block_stack->data.block.exception_region)
    {
      /* Because we don't need or want a new temporary level and
	 because we didn't create one in expand_eh_region_start,
	 create a fake one now to avoid removing one in
	 expand_end_bindings.  */
      push_temp_slots ();

      block_stack->data.block.exception_region = 0;

      expand_end_bindings (NULL_TREE, 0, 0);
    }

  /* Since expand_eh_region_start does an expand_start_bindings, we
     have to first end all the bindings that were created by
     expand_eh_region_start.  */

  thisblock = block_stack;

  /* If any of the variables in this scope were not used, warn the
     user.  */
  warn_about_unused_variables (vars);

  if (thisblock->exit_label)
    {
      do_pending_stack_adjust ();
      emit_label (thisblock->exit_label);
    }

  /* If necessary, make handlers for nonlocal gotos taking
     place in the function calls in this block.  */
  if (function_call_count != thisblock->data.block.n_function_calls
      && nonlocal_labels
      /* Make handler for outermost block
	 if there were any nonlocal gotos to this function.  */
      && (thisblock->next == 0 ? current_function_has_nonlocal_label
	  /* Make handler for inner block if it has something
	     special to do when you jump out of it.  */
	  : (thisblock->data.block.cleanups != 0
	     || thisblock->data.block.stack_level != 0)))
    expand_nl_goto_receivers (thisblock);

  /* Don't allow jumping into a block that has a stack level.
     Cleanups are allowed, though.  */
  if (dont_jump_in
      || thisblock->data.block.stack_level != 0)
    {
      struct label_chain *chain;

      /* Any labels in this block are no longer valid to go to.
	 Mark them to cause an error message.  */
      for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
	{
	  DECL_TOO_LATE (chain->label) = 1;
	  /* If any goto without a fixup came to this label,
	     that must be an error, because gotos without fixups
	     come from outside all saved stack-levels.  */
	  if (TREE_ADDRESSABLE (chain->label))
	    error_with_decl (chain->label,
			     "label `%s' used before containing binding contour");
	}
    }

  /* Restore stack level in effect before the block
     (only if variable-size objects allocated).  */
  /* Perform any cleanups associated with the block.  */

  if (thisblock->data.block.stack_level != 0
      || thisblock->data.block.cleanups != 0)
    {
      /* Only clean up here if this point can actually be reached.  */
      int reachable = GET_CODE (get_last_insn ()) != BARRIER;

      /* Don't let cleanups affect ({...}) constructs.  */
      int old_expr_stmts_for_value = expr_stmts_for_value;
      rtx old_last_expr_value = last_expr_value;
      tree old_last_expr_type = last_expr_type;
      expr_stmts_for_value = 0;

      /* Do the cleanups.  */
      expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
      if (reachable)
	do_pending_stack_adjust ();

      expr_stmts_for_value = old_expr_stmts_for_value;
      last_expr_value = old_last_expr_value;
      last_expr_type = old_last_expr_type;

      /* Restore the stack level.  */

      if (reachable && thisblock->data.block.stack_level != 0)
	{
	  emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
			      thisblock->data.block.stack_level, NULL_RTX);
	  if (nonlocal_goto_handler_slots != 0)
	    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
			     NULL_RTX);
	}

      /* Any gotos out of this block must also do these things.
	 Also report any gotos with fixups that came to labels in this
	 level.  */
      fixup_gotos (thisblock,
		   thisblock->data.block.stack_level,
		   thisblock->data.block.cleanups,
		   thisblock->data.block.first_insn,
		   dont_jump_in);
    }

  /* Mark the beginning and end of the scope if requested.
     We do this now, after running cleanups on the variables
     just going out of scope, so they are in scope for their cleanups.  */

  if (mark_ends)
    emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
  else
    /* Get rid of the beginning-mark if we don't make an end-mark.  */
    NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;

  /* If doing stupid register allocation, make sure lives of all
     register variables declared here extend thru end of scope.  */

  if (obey_regdecls)
    for (decl = vars; decl; decl = TREE_CHAIN (decl))
      if (TREE_CODE (decl) == VAR_DECL && DECL_RTL (decl))
	use_variable (DECL_RTL (decl));

  /* Restore the temporary level of TARGET_EXPRs.  */
  target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;

  /* Restore block_stack level for containing block.  */

  stack_block_stack = thisblock->data.block.innermost_stack_block;
  POPSTACK (block_stack);

  /* Pop the stack slot nesting and free any slots at this level.  */
  pop_temp_slots ();
}
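/* Illustrative sketch (not called anywhere in the compiler): how a front
   end brackets a braced block with declarations.  The helper
   expand_block_statements is a hypothetical stand-in for expanding the
   statements of the block.  */
#if 0
static void
example_expand_block (vars)
     tree vars;		/* chain of VAR_DECLs declared in this scope */
{
  tree decl;

  expand_start_bindings (0);	/* open contour, emit NOTE_INSN_BLOCK_BEG */
  for (decl = vars; decl; decl = TREE_CHAIN (decl))
    {
      expand_decl (decl);	/* allocate a register or stack slot */
      expand_decl_init (decl);	/* emit code for any initializer */
    }
  expand_block_statements ();	/* hypothetical */
  expand_end_bindings (vars, 1, 0);	/* run cleanups, close the contour */
}
#endif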
/* Generate RTL for the automatic variable declaration DECL.
   (Other kinds of declarations are simply ignored if seen here.)  */

void
expand_decl (decl)
     register tree decl;
{
  struct nesting *thisblock;
  tree type;

  type = TREE_TYPE (decl);

  /* Only automatic variables need any expansion done.
     Static and external variables, and external functions,
     will be handled by `assemble_variable' (called from finish_decl).
     TYPE_DECL and CONST_DECL require nothing.
     PARM_DECLs are handled in `assign_parms'.  */

  if (TREE_CODE (decl) != VAR_DECL)
    return;
  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
    return;

  thisblock = block_stack;

  /* Create the RTL representation for the variable.  */

  if (type == error_mark_node)
    DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (DECL_SIZE (decl) == 0)
    /* Variable with incomplete type.  */
    {
      if (DECL_INITIAL (decl) == 0)
	/* Error message was already done; now avoid a crash.  */
	DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
      else
	/* An initializer is going to decide the size of this array.
	   Until we know the size, represent its address with a reg.  */
	DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
      MEM_SET_IN_STRUCT_P (DECL_RTL (decl), AGGREGATE_TYPE_P (type));
    }
  else if (DECL_MODE (decl) != BLKmode
	   /* If -ffloat-store, don't put explicit float vars
	      into regs.  */
	   && !(flag_float_store
		&& TREE_CODE (type) == REAL_TYPE)
	   && ! TREE_THIS_VOLATILE (decl)
	   && ! TREE_ADDRESSABLE (decl)
	   && (DECL_REGISTER (decl) || ! obey_regdecls)
	   /* if -fcheck-memory-usage, check all variables.  */
	   && ! current_function_check_memory_usage)
    {
      /* Automatic variable that can go in a register.  */
      int unsignedp = TREE_UNSIGNED (type);
      enum machine_mode reg_mode
	= promote_mode (type, DECL_MODE (decl), &unsignedp, 0);

      DECL_RTL (decl) = gen_reg_rtx (reg_mode);
      mark_user_reg (DECL_RTL (decl));

      if (POINTER_TYPE_P (type))
	mark_reg_pointer (DECL_RTL (decl),
			  (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
			   / BITS_PER_UNIT));
    }

  else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
	   && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
		 && (TREE_INT_CST_HIGH (DECL_SIZE (decl)) != 0
		     || (TREE_INT_CST_LOW (DECL_SIZE (decl))
			 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
    {
      /* Variable of fixed size that goes on the stack.  */
      rtx oldaddr = 0;
      rtx addr;

      /* If we previously made RTL for this decl, it must be an array
	 whose size was determined by the initializer.
	 The old address was a register; set that register now
	 to the proper address.  */
      if (DECL_RTL (decl) != 0)
	{
	  if (GET_CODE (DECL_RTL (decl)) != MEM
	      || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
	    abort ();
	  oldaddr = XEXP (DECL_RTL (decl), 0);
	}

      DECL_RTL (decl) = assign_temp (TREE_TYPE (decl), 1, 1, 1);
      MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
			   AGGREGATE_TYPE_P (TREE_TYPE (decl)));

      /* Set alignment we actually gave this decl.  */
      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
			   : GET_MODE_BITSIZE (DECL_MODE (decl)));

      if (oldaddr)
	{
	  addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
	  if (addr != oldaddr)
	    emit_move_insn (oldaddr, addr);
	}

      /* If this is a memory ref that contains aggregate components,
	 mark it as such for cse and loop optimize.  */
      MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
			   AGGREGATE_TYPE_P (TREE_TYPE (decl)));

      /* If this is in memory because of -ffloat-store,
	 set the volatile bit, to prevent optimizations from
	 undoing the effects.  */
      if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
	MEM_VOLATILE_P (DECL_RTL (decl)) = 1;

      MEM_ALIAS_SET (DECL_RTL (decl)) = get_alias_set (decl);
    }
  else
    /* Dynamic-size object: must push space on the stack.  */
    {
      rtx address, size;

      /* Record the stack pointer on entry to block, if have
	 not already done so.  */
      if (thisblock->data.block.stack_level == 0)
	{
	  do_pending_stack_adjust ();
	  emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
			   &thisblock->data.block.stack_level,
			   thisblock->data.block.first_insn);
	  stack_block_stack = thisblock;
	}

      /* Compute the variable's size, in bytes.  */
      size = expand_expr (size_binop (CEIL_DIV_EXPR,
				      DECL_SIZE (decl),
				      size_int (BITS_PER_UNIT)),
			  NULL_RTX, VOIDmode, 0);
      free_temp_slots ();

      /* Allocate space on the stack for the variable.  Note that
	 DECL_ALIGN says how the variable is to be aligned and we
	 cannot use it to conclude anything about the alignment of
	 the size.  */
      address = allocate_dynamic_stack_space (size, NULL_RTX,
					      TYPE_ALIGN (TREE_TYPE (decl)));

      /* Reference the variable indirect through that rtx.  */
      DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);

      /* If this is a memory ref that contains aggregate components,
	 mark it as such for cse and loop optimize.  */
      MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
			   AGGREGATE_TYPE_P (TREE_TYPE (decl)));

      /* Indicate the alignment we actually gave this variable.  */
#ifdef STACK_BOUNDARY
      DECL_ALIGN (decl) = STACK_BOUNDARY;
#else
      DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
#endif
    }

  if (TREE_THIS_VOLATILE (decl))
    MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
#if 0 /* A variable is not necessarily unchanging
	 just because it is const.  RTX_UNCHANGING_P
	 means no change in the function,
	 not merely no change in the variable's scope.
	 It is correct to set RTX_UNCHANGING_P if the variable's scope
	 is the whole function.  There's no convenient way to test that.  */
  if (TREE_READONLY (decl))
    RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
#endif

  /* If doing stupid register allocation, make sure life of any
     register variable starts here, at the start of its scope.  */

  if (obey_regdecls)
    use_variable (DECL_RTL (decl));
}
/* Emit code to perform the initialization of a declaration DECL.  */

void
expand_decl_init (decl)
     tree decl;
{
  int was_used = TREE_USED (decl);

  /* If this is a CONST_DECL, we don't have to generate any code, but
     if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
     to be set while in the obstack containing the constant.  If we don't
     do this, we can lose if we have functions nested three deep and the middle
     function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
     the innermost function is the first to expand that STRING_CST.  */
  if (TREE_CODE (decl) == CONST_DECL)
    {
      if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
	expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
		     EXPAND_INITIALIZER);
      return;
    }

  if (TREE_STATIC (decl))
    return;

  /* Compute and store the initial value now.  */

  if (DECL_INITIAL (decl) == error_mark_node)
    {
      enum tree_code code = TREE_CODE (TREE_TYPE (decl));

      if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
	  || code == POINTER_TYPE || code == REFERENCE_TYPE)
	expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
			   0, 0);
      emit_queue ();
    }
  else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
    {
      emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
      expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
      emit_queue ();
    }

  /* Don't let the initialization count as "using" the variable.  */
  TREE_USED (decl) = was_used;

  /* Free any temporaries we made while initializing the decl.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
}
/* CLEANUP is an expression to be executed at exit from this binding contour;
   for example, in C++, it might call the destructor for this variable.

   We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
   CLEANUP multiple times, and have the correct semantics.  This
   happens in exception handling, for gotos, returns, breaks that
   leave the current scope.

   If CLEANUP is nonzero and DECL is zero, we record a cleanup
   that is not associated with any particular variable.  */

int
expand_decl_cleanup (decl, cleanup)
     tree decl, cleanup;
{
  struct nesting *thisblock;

  /* Error if we are not in any block.  */
  if (current_function == 0 || block_stack == 0)
    return 0;

  thisblock = block_stack;

  /* Record the cleanup if there is one.  */

  if (cleanup != 0)
    {
      tree t;
      rtx seq;
      tree *cleanups = &thisblock->data.block.cleanups;
      int cond_context = conditional_context ();

      if (cond_context)
	{
	  rtx flag = gen_reg_rtx (word_mode);
	  rtx set_flag_0;
	  tree cond;

	  start_sequence ();
	  emit_move_insn (flag, const0_rtx);
	  set_flag_0 = get_insns ();
	  end_sequence ();

	  thisblock->data.block.last_unconditional_cleanup
	    = emit_insns_after (set_flag_0,
				thisblock->data.block.last_unconditional_cleanup);

	  emit_move_insn (flag, const1_rtx);

	  /* All cleanups must be on the function_obstack.  */
	  push_obstacks_nochange ();
	  resume_temporary_allocation ();

	  cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
	  DECL_RTL (cond) = flag;

	  /* Conditionalize the cleanup.  */
	  cleanup = build (COND_EXPR, void_type_node,
			   truthvalue_conversion (cond),
			   cleanup, integer_zero_node);
	  cleanup = fold (cleanup);

	  pop_obstacks ();

	  cleanups = thisblock->data.block.cleanup_ptr;
	}

      /* All cleanups must be on the function_obstack.  */
      push_obstacks_nochange ();
      resume_temporary_allocation ();
      cleanup = unsave_expr (cleanup);
      pop_obstacks ();

      t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);

      if (! cond_context)
	/* If this block has a cleanup, it belongs in stack_block_stack.  */
	stack_block_stack = thisblock;

      if (cond_context)
	start_sequence ();

      /* If this was optimized so that there is no exception region for the
	 cleanup, then mark the TREE_LIST node, so that we can later tell
	 if we need to call expand_eh_region_end.  */
      if (! using_eh_for_cleanups_p
	  || expand_eh_region_start_tree (decl, cleanup))
	TREE_ADDRESSABLE (t) = 1;
      /* If that started a new EH region, we're in a new block.  */
      thisblock = block_stack;

      if (cond_context)
	{
	  seq = get_insns ();
	  end_sequence ();
	  if (seq)
	    thisblock->data.block.last_unconditional_cleanup
	      = emit_insns_after (seq,
				  thisblock->data.block.last_unconditional_cleanup);
	}
      else
	{
	  thisblock->data.block.last_unconditional_cleanup
	    = get_last_insn ();
	  thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
	}
    }
  return 1;
}
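
/* A source-level picture of what the conditional-cleanup machinery
   above builds (illustrative only; `flag' and `destroy' are
   hypothetical names, and the real work is done on trees and RTL):

	int flag = 0;		zeroed at the last unconditional point
	if (cond)
	  {
	    flag = 1;		set where the cleanup is registered
	    T t = ...;
	    ...
	  }
	flag ? destroy (&t) : 0;	the recorded cleanup

   so expanding the cleanup list on any exit path is safe whether or
   not the object was ever constructed.  */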
/* Like expand_decl_cleanup, but suppress generating an exception handler
   to perform the cleanup.  */

int
expand_decl_cleanup_no_eh (decl, cleanup)
     tree decl, cleanup;
{
  int save_eh = using_eh_for_cleanups_p;
  int result;

  using_eh_for_cleanups_p = 0;
  result = expand_decl_cleanup (decl, cleanup);
  using_eh_for_cleanups_p = save_eh;

  return result;
}
/* Arrange for the top element of the dynamic cleanup chain to be
   popped if we exit the current binding contour.  DECL is the
   associated declaration, if any, otherwise NULL_TREE.  If the
   current contour is left via an exception, then __sjthrow will pop
   the top element off the dynamic cleanup chain.  The code that
   avoids doing the action we push into the cleanup chain in the
   exceptional case is contained in expand_cleanups.

   This routine is only used by expand_eh_region_start, and that is
   the only way in which an exception region should be started.  This
   routine is only used when using the setjmp/longjmp codegen method
   for exception handling.  */

int
expand_dcc_cleanup (decl)
     tree decl;
{
  struct nesting *thisblock;
  tree cleanup;

  /* Error if we are not in any block.  */
  if (current_function == 0 || block_stack == 0)
    return 0;
  thisblock = block_stack;

  /* Record the cleanup for the dynamic handler chain.  */

  /* All cleanups must be on the function_obstack.  */
  push_obstacks_nochange ();
  resume_temporary_allocation ();
  cleanup = make_node (POPDCC_EXPR);
  pop_obstacks ();

  /* Add the cleanup in a manner similar to expand_decl_cleanup.  */
  thisblock->data.block.cleanups
    = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);

  /* If this block has a cleanup, it belongs in stack_block_stack.  */
  stack_block_stack = thisblock;
  return 1;
}
/* Arrange for the top element of the dynamic handler chain to be
   popped if we exit the current binding contour.  DECL is the
   associated declaration, if any, otherwise NULL_TREE.  If the current
   contour is left via an exception, then __sjthrow will pop the top
   element off the dynamic handler chain.  The code that avoids doing
   the action we push into the handler chain in the exceptional case
   is contained in expand_cleanups.

   This routine is only used by expand_eh_region_start, and that is
   the only way in which an exception region should be started.  This
   routine is only used when using the setjmp/longjmp codegen method
   for exception handling.  */

int
expand_dhc_cleanup (decl)
     tree decl;
{
  struct nesting *thisblock;
  tree cleanup;

  /* Error if we are not in any block.  */
  if (current_function == 0 || block_stack == 0)
    return 0;
  thisblock = block_stack;

  /* Record the cleanup for the dynamic handler chain.  */

  /* All cleanups must be on the function_obstack.  */
  push_obstacks_nochange ();
  resume_temporary_allocation ();
  cleanup = make_node (POPDHC_EXPR);
  pop_obstacks ();

  /* Add the cleanup in a manner similar to expand_decl_cleanup.  */
  thisblock->data.block.cleanups
    = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);

  /* If this block has a cleanup, it belongs in stack_block_stack.  */
  stack_block_stack = thisblock;
  return 1;
}
/* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
   DECL_ELTS is the list of elements that belong to DECL's type.
   In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.  */

void
expand_anon_union_decl (decl, cleanup, decl_elts)
     tree decl, cleanup, decl_elts;
{
  struct nesting *thisblock = current_function == 0 ? 0 : block_stack;
  rtx x;
  tree t;

  /* If any of the elements are addressable, so is the entire union.  */
  for (t = decl_elts; t; t = TREE_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_VALUE (t)))
      {
	TREE_ADDRESSABLE (decl) = 1;
	break;
      }

  expand_decl (decl);
  expand_decl_cleanup (decl, cleanup);
  x = DECL_RTL (decl);

  /* Go through the elements, assigning RTL to each.  */
  for (t = decl_elts; t; t = TREE_CHAIN (t))
    {
      tree decl_elt = TREE_VALUE (t);
      tree cleanup_elt = TREE_PURPOSE (t);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));

      /* Propagate the union's alignment to the elements.  */
      DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);

      /* If the element has BLKmode and the union doesn't, the union is
	 aligned such that the element doesn't need to have BLKmode, so
	 change the element's mode to the appropriate one for its size.  */
      if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
	DECL_MODE (decl_elt) = mode
	  = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
			   MODE_INT, 1);

      /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
	 instead create a new MEM rtx with the proper mode.  */
      if (GET_CODE (x) == MEM)
	{
	  if (mode == GET_MODE (x))
	    DECL_RTL (decl_elt) = x;
	  else
	    {
	      DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
	      MEM_COPY_ATTRIBUTES (DECL_RTL (decl_elt), x);
	      RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
	    }
	}
      else if (GET_CODE (x) == REG)
	{
	  if (mode == GET_MODE (x))
	    DECL_RTL (decl_elt) = x;
	  else
	    DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
	}
      else
	abort ();

      /* Record the cleanup if there is one.  */

      if (cleanup != 0)
	thisblock->data.block.cleanups
	  = temp_tree_cons (decl_elt, cleanup_elt,
			    thisblock->data.block.cleanups);
    }
}
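
/* Illustrative example of the input this routine handles: for a C++
   anonymous union such as

	union { int i; float f; };

   the front end builds one VAR_DECL for the union object (DECL) and
   one VAR_DECL per member (DECL_ELTS); the loop above then gives each
   member the union's own rtx, re-cast to the member's machine mode, so
   all members share storage.  */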
/* Expand a list of cleanups LIST.
   Elements may be expressions or may be nested lists.

   If DONT_DO is nonnull, then any list-element
   whose TREE_PURPOSE matches DONT_DO is omitted.
   This is sometimes used to avoid a cleanup associated with
   a value that is being returned out of the scope.

   If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
   goto and handle protection regions specially in that case.

   If REACHABLE, we emit code, otherwise just inform the exception handling
   code about this finalization.  */

static void
expand_cleanups (list, dont_do, in_fixup, reachable)
     tree list;
     tree dont_do;
     int in_fixup;
     int reachable;
{
  tree tail;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
      {
	if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
	  expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
	else
	  {
	    if (! in_fixup)
	      {
		tree cleanup = TREE_VALUE (tail);

		/* See expand_d{h,c}c_cleanup for why we avoid this.  */
		if (TREE_CODE (cleanup) != POPDHC_EXPR
		    && TREE_CODE (cleanup) != POPDCC_EXPR
		    /* See expand_eh_region_start_tree for this case.  */
		    && ! TREE_ADDRESSABLE (tail))
		  {
		    cleanup = protect_with_terminate (cleanup);
		    expand_eh_region_end (cleanup);
		  }
	      }

	    if (reachable)
	      {
		/* Cleanups may be run multiple times.  For example,
		   when exiting a binding contour, we expand the
		   cleanups associated with that contour.  When a goto
		   within that binding contour has a target outside that
		   contour, it will expand all cleanups from its scope to
		   the target.  Though the cleanups are expanded multiple
		   times, the control paths are non-overlapping so the
		   cleanups will not be executed twice.  */

		/* We may need to protect fixups with rethrow regions.  */
		int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));

		if (protect)
		  expand_fixup_region_start ();

		expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
		if (protect)
		  expand_fixup_region_end (TREE_VALUE (tail));
		free_temp_slots ();
	      }
	  }
      }
}
/* Mark the context we are emitting RTL for as a conditional
   context, so that any cleanup actions we register with
   expand_decl_init will be properly conditionalized when those
   cleanup actions are later performed.  Must be called before any
   expression (tree) is expanded that is within a conditional context.  */

void
start_cleanup_deferral ()
{
  /* block_stack can be NULL if we are inside the parameter list.  It is
     OK to do nothing, because cleanups aren't possible here.  */
  if (block_stack)
    ++block_stack->data.block.conditional_code;
}

/* Mark the end of a conditional region of code.  Because cleanup
   deferrals may be nested, we may still be in a conditional region
   after we end the currently deferred cleanups; only after we end all
   deferred cleanups are we back in unconditional code.  */

void
end_cleanup_deferral ()
{
  /* block_stack can be NULL if we are inside the parameter list.  It is
     OK to do nothing, because cleanups aren't possible here.  */
  if (block_stack)
    --block_stack->data.block.conditional_code;
}
/* Move all cleanups from the current block_stack
   to the containing block_stack, where they are assumed to
   have been created.  If anything can cause a temporary to
   be created, but not expanded for more than one level of
   block_stacks, then this code will have to change.  */

void
move_cleanups_up ()
{
  struct nesting *block = block_stack;
  struct nesting *outer = block->next;

  outer->data.block.cleanups
    = chainon (block->data.block.cleanups,
	       outer->data.block.cleanups);
  block->data.block.cleanups = 0;
}

tree
last_cleanup_this_contour ()
{
  if (block_stack == 0)
    return 0;

  return block_stack->data.block.cleanups;
}
/* Return 1 if there are any pending cleanups at this point.
   If THIS_CONTOUR is nonzero, check the current contour as well.
   Otherwise, look only at the contours that enclose this one.  */

int
any_pending_cleanups (this_contour)
     int this_contour;
{
  struct nesting *block;

  if (current_function == NULL || current_function->stmt == NULL
      || block_stack == 0)
    return 0;

  if (this_contour && block_stack->data.block.cleanups != NULL)
    return 1;

  if (block_stack->data.block.cleanups == 0
      && block_stack->data.block.outer_cleanups == 0)
    return 0;

  for (block = block_stack->next; block; block = block->next)
    if (block->data.block.cleanups != 0)
      return 1;

  return 0;
}
/* Enter a case (Pascal) or switch (C) statement.
   Push a block onto case_stack and nesting_stack
   to accumulate the case-labels that are seen
   and to record the labels generated for the statement.

   EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
   Otherwise, this construct is transparent for `exit_something'.

   EXPR is the index-expression to be dispatched on.
   TYPE is its nominal type.  We could simply convert EXPR to this type,
   but instead we take short cuts.  */

void
expand_start_case (exit_flag, expr, type, printname)
     int exit_flag;
     tree expr;
     tree type;
     const char *printname;
{
  register struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the case we are entering.  */

  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.index_expr = expr;
  thiscase->data.case_stmt.nominal_type = type;
  thiscase->data.case_stmt.default_label = 0;
  thiscase->data.case_stmt.num_ranges = 0;
  thiscase->data.case_stmt.printname = printname;
  thiscase->data.case_stmt.line_number_status = force_line_numbers ();
  case_stack = thiscase;
  nesting_stack = thiscase;

  do_pending_stack_adjust ();

  /* Make sure case_stmt.start points to something that won't
     need any transformation before expand_end_case.  */
  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);

  thiscase->data.case_stmt.start = get_last_insn ();

  start_cleanup_deferral ();
}
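
/* Illustrative sketch of the call protocol for the case-statement
   routines, as a parser for C might drive them (error handling
   omitted; the variables here are hypothetical):  */
#if 0
  expand_start_case (1, cond_expr, type, "switch statement");
  /* ... for each case label seen while parsing the body: */
  pushcase (value, convert, label_decl, &duplicate);
  /* ... a null VALUE registers the default label: */
  pushcase (NULL_TREE, convert, default_decl, &duplicate);
  /* ... after the body has been expanded: */
  expand_end_case (cond_expr);
#endif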
/* Start a "dummy case statement" within which case labels are invalid
   and are not connected to any larger real case statement.
   This can be used if you don't want to let a case statement jump
   into the middle of certain kinds of constructs.  */

void
expand_start_case_dummy ()
{
  register struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the dummy.  */

  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.start = 0;
  thiscase->data.case_stmt.nominal_type = 0;
  thiscase->data.case_stmt.default_label = 0;
  thiscase->data.case_stmt.num_ranges = 0;
  case_stack = thiscase;
  nesting_stack = thiscase;
  start_cleanup_deferral ();
}

/* End a dummy case statement.  */

void
expand_end_case_dummy ()
{
  end_cleanup_deferral ();
  POPSTACK (case_stack);
}

/* Return the data type of the index-expression
   of the innermost case statement, or null if none.  */

tree
case_index_expr_type ()
{
  if (case_stack)
    return TREE_TYPE (case_stack->data.case_stmt.index_expr);
  return 0;
}
static void
check_seenlabel ()
{
  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.line_number_status >= 0)
    {
      rtx insn;

      restore_line_number_status
	(case_stack->data.case_stmt.line_number_status);
      case_stack->data.case_stmt.line_number_status = -1;

      for (insn = case_stack->data.case_stmt.start;
	   insn;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CODE_LABEL)
	    break;
	  if (GET_CODE (insn) != NOTE
	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	    {
	      do
		insn = PREV_INSN (insn);
	      while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));

	      /* If insn is zero, then there must have been a syntax error.  */
	      if (insn)
		warning_with_file_and_line (NOTE_SOURCE_FILE(insn),
					    NOTE_LINE_NUMBER(insn),
					    "unreachable code at beginning of %s",
					    case_stack->data.case_stmt.printname);
	      break;
	    }
	}
    }
}
/* Accumulate one case or default label inside a case or switch statement.
   VALUE is the value of the case (a null pointer, for a default label).
   The function CONVERTER, when applied to arguments T and V,
   converts the value V to the type T.

   If not currently inside a case or switch statement, return 1 and do
   nothing.  The caller will print a language-specific error message.
   If VALUE is a duplicate or overlaps, return 2 and do nothing
   except store the (first) duplicate node in *DUPLICATE.
   If VALUE is out of range, return 3 and do nothing.
   If we are jumping into the scope of a cleanup or var-sized array, return 5.
   Return 0 on success.

   Extended to handle range statements.  */

int
pushcase (value, converter, label, duplicate)
     register tree value;
     tree (*converter) PROTO((tree, tree));
     register tree label;
     tree *duplicate;
{
  tree index_type;
  tree nominal_type;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  /* Convert VALUE to the type in which the comparisons are nominally done.  */
  if (value != 0)
    value = (*converter) (nominal_type, value);

  check_seenlabel ();

  /* Fail if this value is out of range for the actual type of the index
     (which may be narrower than NOMINAL_TYPE).  */
  if (value != 0 && ! int_fits_type_p (value, index_type))
    return 3;

  /* Fail if this is a duplicate or overlaps another entry.  */
  if (value == 0)
    {
      if (case_stack->data.case_stmt.default_label != 0)
	{
	  *duplicate = case_stack->data.case_stmt.default_label;
	  return 2;
	}
      case_stack->data.case_stmt.default_label = label;
    }
  else
    return add_case_node (value, value, label, duplicate);

  expand_label (label);
  return 0;
}
/* Like pushcase but this case applies to all values between VALUE1 and
   VALUE2 (inclusive).  If VALUE1 is NULL, the range starts at the lowest
   value of the index type and ends at VALUE2.  If VALUE2 is NULL, the range
   starts at VALUE1 and ends at the highest value of the index type.
   If both are NULL, this case applies to all values.

   The return value is the same as that of pushcase but there is one
   additional error code: 4 means the specified range was empty.  */

int
pushcase_range (value1, value2, converter, label, duplicate)
     register tree value1, value2;
     tree (*converter) PROTO((tree, tree));
     register tree label;
     tree *duplicate;
{
  tree index_type;
  tree nominal_type;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  check_seenlabel ();

  /* Convert VALUEs to type in which the comparisons are nominally done
     and replace any unspecified value with the corresponding bound.  */
  if (value1 == 0)
    value1 = TYPE_MIN_VALUE (index_type);
  if (value2 == 0)
    value2 = TYPE_MAX_VALUE (index_type);

  /* Fail if the range is empty.  Do this before any conversion since
     we want to allow out-of-range empty ranges.  */
  if (value2 && tree_int_cst_lt (value2, value1))
    return 4;

  value1 = (*converter) (nominal_type, value1);

  /* If the max was unbounded, use the max of the nominal_type we are
     converting to.  Do this after the < check above to suppress false
     positives.  */
  if (value2 == 0)
    value2 = TYPE_MAX_VALUE (nominal_type);
  value2 = (*converter) (nominal_type, value2);

  /* Fail if these values are out of range.  */
  if (TREE_CONSTANT_OVERFLOW (value1)
      || ! int_fits_type_p (value1, index_type))
    return 3;

  if (TREE_CONSTANT_OVERFLOW (value2)
      || ! int_fits_type_p (value2, index_type))
    return 3;

  return add_case_node (value1, value2, label, duplicate);
}
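
/* The return codes documented above, recapped as a caller might
   dispatch on them.  An illustrative front-end fragment; the exact
   diagnostic wording is hypothetical:  */
#if 0
  switch (pushcase_range (low, high, convert, label, &duplicate))
    {
    case 0: break;					/* success */
    case 1: error ("case label not within a switch statement"); break;
    case 2: error ("duplicate case value"); break;	/* see *duplicate */
    case 3: warning ("case value out of range"); break;
    case 4: warning ("empty case range specified"); break;
    case 5: error ("case label within scope of cleanup or variable array"); break;
    }
#endif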
/* Do the actual insertion of a case label for pushcase and pushcase_range
   into case_stack->data.case_stmt.case_list.  Use an AVL tree to avoid
   slowdown for large switch statements.  */

static int
add_case_node (low, high, label, duplicate)
     tree low, high;
     tree label;
     tree *duplicate;
{
  struct case_node *p, **q, *r;

  q = &case_stack->data.case_stmt.case_list;
  p = *q;

  while ((r = *q))
    {
      p = r;

      /* Keep going past elements distinctly greater than HIGH.  */
      if (tree_int_cst_lt (high, p->low))
	q = &p->left;

      /* or distinctly less than LOW.  */
      else if (tree_int_cst_lt (p->high, low))
	q = &p->right;

      else
	{
	  /* We have an overlap; this is an error.  */
	  *duplicate = p->code_label;
	  return 2;
	}
    }

  /* Add this label to the chain, and succeed.
     Copy LOW, HIGH so they are on temporary rather than momentary
     obstack and will thus survive till the end of the case statement.  */

  r = (struct case_node *) oballoc (sizeof (struct case_node));
  r->low = copy_node (low);

  /* If the bounds are equal, turn this into the one-value case.  */

  if (tree_int_cst_equal (low, high))
    r->high = r->low;
  else
    {
      r->high = copy_node (high);
      case_stack->data.case_stmt.num_ranges++;
    }

  r->code_label = label;
  expand_label (label);
  *q = r;
  r->parent = p;
  r->left = r->right = 0;
  r->balance = 0;

  /* Rebalance the AVL tree on the path back up from the new node.  */

  while (p)
    {
      struct case_node *s;

      if (r == p->left)
	{
	  int b;

	  if (! (b = p->balance))
	    /* Growth propagation from left side.  */
	    p->balance--;
	  else if (b < 0)
	    {
	      if (r->balance < 0)
		{
		  /* R-Rotation */
		  if ((p->left = s = r->right))
		    s->parent = p;

		  r->right = p;
		  p->balance = 0;
		  r->balance = 0;
		  s = p->parent;
		  p->parent = r;

		  if ((r->parent = s))
		    {
		      if (s->left == p)
			s->left = r;
		      else
			s->right = r;
		    }
		  else
		    case_stack->data.case_stmt.case_list = r;
		}
	      else
		/* r->balance == +1 */
		{
		  /* LR-Rotation */

		  int b2;
		  struct case_node *t = r->right;

		  if ((p->left = s = t->right))
		    s->parent = p;

		  t->right = p;
		  if ((r->right = s = t->left))
		    s->parent = r;

		  t->left = r;
		  b = t->balance;
		  b2 = b < 0;
		  p->balance = b2;
		  b2 = -b2 - b;
		  r->balance = b2;
		  t->balance = 0;
		  s = p->parent;
		  p->parent = t;
		  r->parent = t;

		  if ((t->parent = s))
		    {
		      if (s->left == p)
			s->left = t;
		      else
			s->right = t;
		    }
		  else
		    case_stack->data.case_stmt.case_list = t;
		}
	      break;
	    }
	  else
	    {
	      /* p->balance == +1; growth of left side balances the node.  */
	      p->balance = 0;
	      break;
	    }
	}
      else
	/* r == p->right */
	{
	  int b;

	  if (! (b = p->balance))
	    /* Growth propagation from right side.  */
	    p->balance++;
	  else if (b > 0)
	    {
	      if (r->balance > 0)
		{
		  /* L-Rotation */
		  if ((p->right = s = r->left))
		    s->parent = p;

		  r->left = p;
		  p->balance = 0;
		  r->balance = 0;
		  s = p->parent;
		  p->parent = r;

		  if ((r->parent = s))
		    {
		      if (s->left == p)
			s->left = r;
		      else
			s->right = r;
		    }
		  else
		    case_stack->data.case_stmt.case_list = r;
		}
	      else
		/* r->balance == -1 */
		{
		  /* RL-Rotation */

		  int b2;
		  struct case_node *t = r->left;

		  if ((p->right = s = t->left))
		    s->parent = p;

		  t->left = p;
		  if ((r->left = s = t->right))
		    s->parent = r;

		  t->right = r;
		  b = t->balance;
		  b2 = b < 0;
		  r->balance = b2;
		  b2 = -b2 - b;
		  p->balance = b2;
		  t->balance = 0;
		  s = p->parent;
		  p->parent = t;
		  r->parent = t;

		  if ((t->parent = s))
		    {
		      if (s->left == p)
			s->left = t;
		      else
			s->right = t;
		    }
		  else
		    case_stack->data.case_stmt.case_list = t;
		}
	      break;
	    }
	  else
	    {
	      /* p->balance == -1; growth of right side balances the node.  */
	      p->balance = 0;
	      break;
	    }
	}

      r = p;
      p = p->parent;
    }

  return 0;
}
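
/* The descent at the top of add_case_node, restated as a minimal
   self-contained sketch: ranges live in a binary search tree, and an
   insertion fails exactly when the new [lo, hi] intersects an existing
   node.  (Plain longs stand in for the tree constants; all names here
   are hypothetical.)  */
#if 0
struct range { long lo, hi; struct range *left, *right; };

static struct range **
find_slot (q, lo, hi, overlap)
     struct range **q;
     long lo, hi;
     struct range **overlap;
{
  struct range *p;
  while ((p = *q))
    {
      if (hi < p->lo)
	q = &p->left;		/* entirely below this node */
      else if (p->hi < lo)
	q = &p->right;		/* entirely above this node */
      else
	{
	  *overlap = p;		/* intersects: duplicate/overlap error */
	  return 0;
	}
    }
  return q;			/* empty slot where the range belongs */
}
#endif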
/* Returns the number of possible values of TYPE.
   Returns -1 if the number is unknown or variable.
   Returns -2 if the number does not fit in a HOST_WIDE_INT.
   Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
   do not increase monotonically (there may be duplicates);
   to 1 if the values increase monotonically, but not always by 1;
   otherwise sets it to 0.  */

HOST_WIDE_INT
all_cases_count (type, spareness)
     tree type;
     int *spareness;
{
  tree t;
  HOST_WIDE_INT count = 0;

  *spareness = 0;

  switch (TREE_CODE (type))
    {
    case BOOLEAN_TYPE:
      count = 2;
      break;

    case CHAR_TYPE:
      count = 1 << BITS_PER_UNIT;
      break;

    default:
    case INTEGER_TYPE:
      if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
	  || TYPE_MAX_VALUE (type) == NULL
	  || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
	return -1;
      else
	{
	  /* count
	     = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
	     - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
	     but with overflow checking.  */
	  tree mint = TYPE_MIN_VALUE (type);
	  tree maxt = TYPE_MAX_VALUE (type);
	  HOST_WIDE_INT lo, hi;
	  neg_double(TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
		     &lo, &hi);
	  add_double(TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
		     lo, hi, &lo, &hi);
	  add_double (lo, hi, 1, 0, &lo, &hi);
	  if (hi != 0 || lo < 0)
	    return -2;
	  count = lo;
	}
      break;

    case ENUMERAL_TYPE:
      count = 0;
      for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
	{
	  if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
	      || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
	      || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
	      != TREE_INT_CST_LOW (TREE_VALUE (t)))
	    *spareness = 1;
	  count++;
	}
      if (*spareness == 1)
	{
	  tree prev = TREE_VALUE (TYPE_VALUES (type));
	  for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
	    {
	      if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
		{
		  *spareness = 2;
		  break;
		}
	      prev = TREE_VALUE (t);
	    }
	}
    }
  return count;
}
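
/* The overflow-checked computation above, shown for the single-word
   case: count = max - min + 1 must be computed so that a full-range
   type is detected rather than silently wrapping.  A minimal sketch
   using unsigned arithmetic (hypothetical helper, not part of the
   compiler):  */
#if 0
/* Returns -2 if max - min + 1 does not fit in a signed long.  */
static long
range_count (min, max)
     long min, max;
{
  unsigned long n = (unsigned long) max - (unsigned long) min + 1;
  if (n == 0 || n > (unsigned long) LONG_MAX)	/* wrapped, or too big */
    return -2;
  return (long) n;
}
#endif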
#define BITARRAY_TEST(ARRAY, INDEX) \
  ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
			  & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
#define BITARRAY_SET(ARRAY, INDEX) \
  ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
			  |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
/* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
   with the case values we have seen, assuming the case expression
   has the given TYPE.
   SPARSENESS is as determined by all_cases_count.

   The time needed is proportional to COUNT, unless
   SPARSENESS is 2, in which case quadratic time is needed.  */

static void
mark_seen_cases (type, cases_seen, count, sparseness)
     tree type;
     unsigned char *cases_seen;
     long count;
     int sparseness;
{
  tree next_node_to_try = NULL_TREE;
  long next_node_offset = 0;

  register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
  tree val = make_node (INTEGER_CST);
  TREE_TYPE (val) = type;
  if (! root)
    ; /* Do nothing.  */
  else if (sparseness == 2)
    {
      tree t;
      HOST_WIDE_INT xlo;

      /* This less efficient loop is only needed to handle
	 duplicate case values (multiple enum constants
	 with the same value).  */
      TREE_TYPE (val) = TREE_TYPE (root->low);
      for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
	   t = TREE_CHAIN (t), xlo++)
	{
	  TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
	  TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
	  n = root;
	  do
	    {
	      /* Keep going past elements distinctly greater than VAL.  */
	      if (tree_int_cst_lt (val, n->low))
		n = n->left;

	      /* or distinctly less than VAL.  */
	      else if (tree_int_cst_lt (n->high, val))
		n = n->right;

	      else
		{
		  /* We have found a matching range.  */
		  BITARRAY_SET (cases_seen, xlo);
		  break;
		}
	    }
	  while (n);
	}
    }
  else
    {
      if (root->left)
	case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
      for (n = root; n; n = n->right)
	{
	  TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
	  TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
	  while ( ! tree_int_cst_lt (n->high, val))
	    {
	      /* Calculate (into xlo) the "offset" of the integer (val).
		 The element with lowest value has offset 0, the next smallest
		 element has offset 1, etc.  */

	      HOST_WIDE_INT xlo, xhi;
	      tree t;
	      if (sparseness && TYPE_VALUES (type) != NULL_TREE)
		{
		  /* The TYPE_VALUES will be in increasing order, so
		     starting searching where we last ended.  */
		  t = next_node_to_try;
		  xlo = next_node_offset;
		  xhi = 0;
		  for (;;)
		    {
		      if (t == NULL_TREE)
			{
			  t = TYPE_VALUES (type);
			  xlo = 0;
			}
		      if (tree_int_cst_equal (val, TREE_VALUE (t)))
			{
			  next_node_to_try = TREE_CHAIN (t);
			  next_node_offset = xlo + 1;
			  break;
			}
		      xlo++;
		      t = TREE_CHAIN (t);
		      if (t == next_node_to_try)
			{
			  xlo = -1;
			  break;
			}
		    }
		}
	      else
		{
		  t = TYPE_MIN_VALUE (type);
		  if (t)
		    neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
				&xlo, &xhi);
		  else
		    xlo = xhi = 0;
		  add_double (xlo, xhi,
			      TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
			      &xlo, &xhi);
		}

	      if (xhi == 0 && xlo >= 0 && xlo < count)
		BITARRAY_SET (cases_seen, xlo);
	      add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
			  1, 0,
			  &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
	    }
	}
    }
}
/* Called when the index of a switch statement is an enumerated type
   and there is no default label.

   Checks that all enumeration literals are covered by the case
   expressions of a switch.  Also, warn if there are any extra
   switch cases that are *not* elements of the enumerated type.

   If all enumeration literals were covered by the case expressions,
   turn one of the expressions into the default expression since it should
   not be possible to fall through such a switch.  */

void
check_for_full_enumeration_handling (type)
     tree type;
{
  register struct case_node *n;
  register tree chain;
#if 0  /* variable used by 'if 0'ed  code below. */
  register struct case_node **l;
  int all_values = 1;
#endif

  /* True iff the selector type is a numbered set mode.  */
  int sparseness = 0;

  /* The number of possible selector values.  */
  HOST_WIDE_INT size;

  /* For each possible selector value.  a one iff it has been matched
     by a case value alternative.  */
  unsigned char *cases_seen;

  /* The allocated size of cases_seen, in chars.  */
  long bytes_needed;

  if (! warn_switch)
    return;

  size = all_cases_count (type, &sparseness);
  bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;

  if (size > 0 && size < 600000
      /* We deliberately use calloc here, not cmalloc, so that we can suppress
	 this optimization if we don't have enough memory rather than
	 aborting, as xmalloc would do.  */
      && (cases_seen = (unsigned char *) calloc (bytes_needed, 1)) != NULL)
    {
      long i;
      tree v = TYPE_VALUES (type);

      /* The time complexity of this code is normally O(N), where
	 N being the number of members in the enumerated type.
	 However, if type is a ENUMERAL_TYPE whose values do not
	 increase monotonically, O(N*log(N)) time may be needed.  */

      mark_seen_cases (type, cases_seen, size, sparseness);

      for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
	if (BITARRAY_TEST(cases_seen, i) == 0)
	  warning ("enumeration value `%s' not handled in switch",
		   IDENTIFIER_POINTER (TREE_PURPOSE (v)));

      free (cases_seen);
    }

  /* Now we go the other way around; we warn if there are case
     expressions that don't correspond to enumerators.  This can
     occur since C and C++ don't enforce type-checking of
     assignments to enumeration variables.  */

  if (case_stack->data.case_stmt.case_list
      && case_stack->data.case_stmt.case_list->left)
    case_stack->data.case_stmt.case_list
      = case_tree2list (case_stack->data.case_stmt.case_list, 0);
  for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
    {
      for (chain = TYPE_VALUES (type);
	   chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
	   chain = TREE_CHAIN (chain))
	;

      if (!chain)
	{
	  if (TYPE_NAME (type) == 0)
	    warning ("case value `%ld' not in enumerated type",
		     (long) TREE_INT_CST_LOW (n->low));
	  else
	    warning ("case value `%ld' not in enumerated type `%s'",
		     (long) TREE_INT_CST_LOW (n->low),
		     IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
					  == IDENTIFIER_NODE)
					 ? TYPE_NAME (type)
					 : DECL_NAME (TYPE_NAME (type))));
	}
      if (!tree_int_cst_equal (n->low, n->high))
	{
	  for (chain = TYPE_VALUES (type);
	       chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
	       chain = TREE_CHAIN (chain))
	    ;

	  if (!chain)
	    {
	      if (TYPE_NAME (type) == 0)
		warning ("case value `%ld' not in enumerated type",
			 (long) TREE_INT_CST_LOW (n->high));
	      else
		warning ("case value `%ld' not in enumerated type `%s'",
			 (long) TREE_INT_CST_LOW (n->high),
			 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
					      == IDENTIFIER_NODE)
					     ? TYPE_NAME (type)
					     : DECL_NAME (TYPE_NAME (type))));
	    }
	}
    }

#if 0
  /* ??? This optimization is disabled because it causes valid programs to
     fail.  ANSI C does not guarantee that an expression with enum type
     will have a value that is the same as one of the enumeration literals.  */

  /* If all values were found as case labels, make one of them the default
     label.  Thus, this switch will never fall through.  We arbitrarily pick
     the last one to make the default since this is likely the most
     efficient choice.  */

  if (all_values)
    {
      for (l = &case_stack->data.case_stmt.case_list;
	   (*l)->right != 0;
	   l = &(*l)->right)
	;

      case_stack->data.case_stmt.default_label = (*l)->code_label;
    }
#endif /* 0 */
}
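
/* Example input that exercises both warnings above (assuming the
   -Wswitch flag is in effect):  */
#if 0
  enum color { RED, GREEN, BLUE };
  enum color c;
  switch (c)		/* no default label */
    {
    case RED: ...	/* GREEN, BLUE unhandled: first warning */
    case 7:   ...	/* 7 is not an enumerator: second warning */
    }
#endif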
/* Terminate a case (Pascal) or switch (C) statement
   in which ORIG_INDEX is the expression to be tested.
   Generate the code to test it and jump to the right place.  */

void
expand_end_case (orig_index)
     tree orig_index;
{
  tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE, orig_minval;
  rtx default_label = 0;
  register struct case_node *n;
  unsigned int count;
  rtx index;
  rtx table_label;
  int ncases;
  rtx *labelvec;
  register int i;
  rtx before_case;
  register struct nesting *thiscase = case_stack;
  tree index_expr, index_type;
  int unsignedp;

  /* Don't crash due to previous errors.  */
  if (thiscase == NULL)
    return;

  table_label = gen_label_rtx ();
  index_expr = thiscase->data.case_stmt.index_expr;
  index_type = TREE_TYPE (index_expr);
  unsignedp = TREE_UNSIGNED (index_type);

  do_pending_stack_adjust ();

  /* This might get a spurious warning in the presence of a syntax error;
     it could be fixed by moving the call to check_seenlabel after the
     check for error_mark_node, and copying the code of check_seenlabel that
     deals with case_stack->data.case_stmt.line_number_status /
     restore_line_number_status in front of the call to end_cleanup_deferral;
     However, this might miss some useful warnings in the presence of
     non-syntax errors.  */
  check_seenlabel ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (index_type != error_mark_node)
    {
      /* If switch expression was an enumerated type, check that all
	 enumeration literals are covered by the cases.
	 No sense trying this if there's a default case, however.  */

      if (!thiscase->data.case_stmt.default_label
	  && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
	  && TREE_CODE (index_expr) != INTEGER_CST)
	check_for_full_enumeration_handling (TREE_TYPE (orig_index));

      /* If we don't have a default-label, create one here,
	 after the body of the switch.  */
      if (thiscase->data.case_stmt.default_label == 0)
	{
	  thiscase->data.case_stmt.default_label
	    = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	  expand_label (thiscase->data.case_stmt.default_label);
	}
      default_label = label_rtx (thiscase->data.case_stmt.default_label);

      before_case = get_last_insn ();

      if (thiscase->data.case_stmt.case_list
	  && thiscase->data.case_stmt.case_list->left)
	thiscase->data.case_stmt.case_list
	  = case_tree2list(thiscase->data.case_stmt.case_list, 0);

      /* Simplify the case-list before we count it.  */
      group_case_nodes (thiscase->data.case_stmt.case_list);

      /* Get upper and lower bounds of case values.
	 Also convert all the case values to the index expr's data type.  */

      count = 0;
      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	{
	  /* Check low and high label values are integers.  */
	  if (TREE_CODE (n->low) != INTEGER_CST)
	    abort ();
	  if (TREE_CODE (n->high) != INTEGER_CST)
	    abort ();

	  n->low = convert (index_type, n->low);
	  n->high = convert (index_type, n->high);

	  /* Count the elements and track the largest and smallest
	     of them (treating them as signed even if they are not).  */
	  if (count++ == 0)
	    {
	      minval = n->low;
	      maxval = n->high;
	    }
	  else
	    {
	      if (INT_CST_LT (n->low, minval))
		minval = n->low;
	      if (INT_CST_LT (maxval, n->high))
		maxval = n->high;
	    }
	  /* A range counts double, since it requires two compares.  */
	  if (! tree_int_cst_equal (n->low, n->high))
	    count++;
	}

      orig_minval = minval;

      /* Compute span of values.  */
      if (count != 0)
	range = fold (build (MINUS_EXPR, index_type, maxval, minval));

      end_cleanup_deferral ();

      if (count == 0)
	{
	  expand_expr (index_expr, const0_rtx, VOIDmode, 0);
	  emit_queue ();
	  emit_jump (default_label);
	}

      /* If range of values is much bigger than number of values,
	 make a sequence of conditional branches instead of a dispatch.
	 If the switch-index is a constant, do it this way
	 because we can optimize it.  */

#ifndef CASE_VALUES_THRESHOLD
#ifdef HAVE_casesi
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#else
      /* If machine does not have a case insn that compares the
	 bounds, this means extra overhead for dispatch tables
	 which raises the threshold for using them.  */
#define CASE_VALUES_THRESHOLD 5
#endif /* HAVE_casesi */
#endif /* CASE_VALUES_THRESHOLD */

      else if (TREE_INT_CST_HIGH (range) != 0
	       || count < (unsigned int) CASE_VALUES_THRESHOLD
	       || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
		   > 10 * count)
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
	       || flag_pic
#endif
	       || TREE_CODE (index_expr) == INTEGER_CST
	       /* These will reduce to a constant.  */
	       || (TREE_CODE (index_expr) == CALL_EXPR
		   && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
		   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
		   && DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_NORMAL
		   && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
	       || (TREE_CODE (index_expr) == COMPOUND_EXPR
		   && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
	{
	  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);

	  /* If the index is a short or char that we do not have
	     an insn to handle comparisons directly, convert it to
	     a full integer now, rather than letting each comparison
	     generate the conversion.  */

	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
	      && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
		  == CODE_FOR_nothing))
	    {
	      enum machine_mode wider_mode;
	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
		if (cmp_optab->handlers[(int) wider_mode].insn_code
		    != CODE_FOR_nothing)
		  {
		    index = convert_to_mode (wider_mode, index, unsignedp);
		    break;
		  }
	    }

	  emit_queue ();
	  do_pending_stack_adjust ();

	  index = protect_from_queue (index, 0);
	  if (GET_CODE (index) == MEM)
	    index = copy_to_reg (index);
	  if (GET_CODE (index) == CONST_INT
	      || TREE_CODE (index_expr) == INTEGER_CST)
	    {
	      /* Make a tree node with the proper constant value
		 if we don't already have one.  */
	      if (TREE_CODE (index_expr) != INTEGER_CST)
		{
		  index_expr
		    = build_int_2 (INTVAL (index),
				   unsignedp || INTVAL (index) >= 0 ? 0 : -1);
		  index_expr = convert (index_type, index_expr);
		}

	      /* For constant index expressions we need only
		 issue an unconditional branch to the appropriate
		 target code.  The job of removing any unreachable
		 code is left to the optimisation phase if the
		 "-O" option is specified.  */
	      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
		if (! tree_int_cst_lt (index_expr, n->low)
		    && ! tree_int_cst_lt (n->high, index_expr))
		  break;

	      if (n)
		emit_jump (label_rtx (n->code_label));
	      else
		emit_jump (default_label);
	    }
	  else
	    {
	      /* If the index expression is not constant we generate
		 a binary decision tree to select the appropriate
		 target code.  This is done as follows:

		 The list of cases is rearranged into a binary tree,
		 nearly optimal assuming equal probability for each case.

		 The tree is transformed into RTL, eliminating
		 redundant test conditions at the same time.

		 If program flow could reach the end of the
		 decision tree an unconditional jump to the
		 default code is emitted.  */

	      use_cost_table
		= (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
		   && estimate_case_costs (thiscase->data.case_stmt.case_list));
	      balance_case_nodes (&thiscase->data.case_stmt.case_list,
				  NULL_PTR);
	      emit_case_nodes (index, thiscase->data.case_stmt.case_list,
			       default_label, index_type);
	      emit_jump_if_reachable (default_label);
	    }
	}
      else
	{
	  int win = 0;
#ifdef HAVE_casesi
	  if (HAVE_casesi)
	    {
	      enum machine_mode index_mode = SImode;
	      int index_bits = GET_MODE_BITSIZE (index_mode);
	      rtx op1, op2;
	      enum machine_mode op_mode;

	      /* Convert the index to SImode.  */
	      if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
		  > GET_MODE_BITSIZE (index_mode))
		{
		  enum machine_mode omode = TYPE_MODE (index_type);
		  rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

		  /* We must handle the endpoints in the original mode.  */
		  index_expr = build (MINUS_EXPR, index_type,
				      index_expr, minval);
		  minval = integer_zero_node;
		  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
		  emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
					   omode, 1, 0, default_label);
		  /* Now we can safely truncate.  */
		  index = convert_to_mode (index_mode, index, 0);
		}
	      else
		{
		  if (TYPE_MODE (index_type) != index_mode)
		    {
		      index_expr = convert (type_for_size (index_bits, 0),
					    index_expr);
		      index_type = TREE_TYPE (index_expr);
		    }

		  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
		}
	      emit_queue ();
	      index = protect_from_queue (index, 0);
	      do_pending_stack_adjust ();

	      op_mode = insn_data[(int)CODE_FOR_casesi].operand[0].mode;
	      if (! (*insn_data[(int)CODE_FOR_casesi].operand[0].predicate)
		  (index, op_mode))
		index = copy_to_mode_reg (op_mode, index);

	      op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

	      op_mode = insn_data[(int)CODE_FOR_casesi].operand[1].mode;
	      if (! (*insn_data[(int)CODE_FOR_casesi].operand[1].predicate)
		  (op1, op_mode))
		op1 = copy_to_mode_reg (op_mode, op1);

	      op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

	      op_mode = insn_data[(int)CODE_FOR_casesi].operand[2].mode;
	      if (! (*insn_data[(int)CODE_FOR_casesi].operand[2].predicate)
		  (op2, op_mode))
		op2 = copy_to_mode_reg (op_mode, op2);

	      emit_jump_insn (gen_casesi (index, op1, op2,
					  table_label, default_label));
	      win = 1;
	    }
#endif
#ifdef HAVE_tablejump
	  if (! win && HAVE_tablejump)
	    {
	      index_expr = convert (thiscase->data.case_stmt.nominal_type,
				    fold (build (MINUS_EXPR, index_type,
						 index_expr, minval)));
	      index_type = TREE_TYPE (index_expr);
	      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
	      emit_queue ();
	      index = protect_from_queue (index, 0);
	      do_pending_stack_adjust ();

	      do_tablejump (index, TYPE_MODE (index_type),
			    expand_expr (range, NULL_RTX, VOIDmode, 0),
			    table_label, default_label);
	      win = 1;
	    }
#endif
	  if (! win)
	    abort ();

	  /* Get table of labels to jump to, in order of case index.  */

	  ncases = TREE_INT_CST_LOW (range) + 1;
	  labelvec = (rtx *) alloca (ncases * sizeof (rtx));
	  bzero ((char *) labelvec, ncases * sizeof (rtx));

	  for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	    {
	      register HOST_WIDE_INT i
		= TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);

	      while (1)
		{
		  labelvec[i]
		    = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
		  if (i + TREE_INT_CST_LOW (orig_minval)
		      == TREE_INT_CST_LOW (n->high))
		    break;
		  i++;
		}
	    }

	  /* Fill in the gaps with the default.  */
	  for (i = 0; i < ncases; i++)
	    if (labelvec[i] == 0)
	      labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);

	  /* Output the table */
	  emit_label (table_label);

	  if (CASE_VECTOR_PC_RELATIVE || flag_pic)
	    emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
						   gen_rtx_LABEL_REF (Pmode, table_label),
						   gen_rtvec_v (ncases, labelvec),
						   const0_rtx, const0_rtx));
	  else
	    emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
					      gen_rtvec_v (ncases, labelvec)));

	  /* If the case insn drops through the table,
	     after the table we must jump to the default-label.
	     Otherwise record no drop-through after the table.  */
#ifdef CASE_DROPS_THROUGH
	  emit_jump (default_label);
#else
	  emit_barrier ();
#endif
	}

      before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
      reorder_insns (before_case, get_last_insn (),
		     thiscase->data.case_stmt.start);
    }
  else
    end_cleanup_deferral ();

  if (thiscase->exit_label)
    emit_label (thiscase->exit_label);

  POPSTACK (case_stack);

  free_temp_slots ();
}
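
/* The choice above between a compare-and-branch tree and a dispatch
   table reduces to a density test; a minimal restatement as a
   hypothetical predicate (the real decision also checks for constant
   indexes, PIC, and multiword ranges):  */
#if 0
static int
use_branch_tree (span, count)
     unsigned long span;	/* max case value - min case value */
     unsigned int count;	/* number of cases, ranges counted twice */
{
  /* Branch instead of building a table when the case count is tiny or
     the value range is more than ten times the weighted count.  */
  return count < (unsigned int) CASE_VALUES_THRESHOLD || span > 10 * count;
}
#endif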
/* Convert the tree NODE into a list linked by the right field, with the left
   field zeroed.  RIGHT is used for recursion; it is a list to be placed
   rightmost in the resulting list.  */

static struct case_node *
case_tree2list (node, right)
     struct case_node *node, *right;
{
  struct case_node *left;

  if (node->right)
    right = case_tree2list (node->right, right);

  node->right = right;
  if ((left = node->left))
    {
      node->left = 0;
      return case_tree2list (left, node);
    }

  return node;
}
/* Generate code to jump to LABEL if OP1 and OP2 are equal.  */

static void
do_jump_if_equal (op1, op2, label, unsignedp)
     rtx op1, op2, label;
     int unsignedp;
{
  if (GET_CODE (op1) == CONST_INT
      && GET_CODE (op2) == CONST_INT)
    {
      if (INTVAL (op1) == INTVAL (op2))
	emit_jump (label);
    }
  else
    {
      enum machine_mode mode = GET_MODE (op1);
      if (mode == VOIDmode)
	mode = GET_MODE (op2);
      emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX, mode, unsignedp,
			       0, label);
    }
}
/* Not all case values are encountered equally.  This function
   uses a heuristic to weight case labels, in cases where that
   looks like a reasonable thing to do.

   Right now, all we try to guess is text, and we establish the
   following weights:

	chars above space:	16
	digits:			16
	default:		12
	space, punct:		8
	tab:			4
	newline:		2
	other "\" chars:	1
	remaining chars:	0

   If we find any cases in the switch that are not either -1 or in the range
   of valid ASCII characters, or are control characters other than those
   commonly used with "\", don't treat this switch scanning text.

   Return 1 if these nodes are suitable for cost estimation, otherwise
   return 0.  */

static int
estimate_case_costs (node)
     case_node_ptr node;
{
  tree min_ascii = build_int_2 (-1, -1);
  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
  case_node_ptr n;
  int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (cost_table == NULL)
    {
      cost_table = ((short *) xcalloc (129, sizeof (short))) + 1;

      for (i = 0; i < 128; i++)
	{
	  if (ISALNUM (i))
	    cost_table[i] = 16;
	  else if (ISPUNCT (i))
	    cost_table[i] = 8;
	  else if (ISCNTRL (i))
	    cost_table[i] = -1;
	}

      cost_table[' '] = 8;
      cost_table['\t'] = 4;
      cost_table['\0'] = 4;
      cost_table['\n'] = 2;
      cost_table['\f'] = 1;
      cost_table['\v'] = 1;
      cost_table['\b'] = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
	return 0;

      for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
	if (cost_table[i] < 0)
	  return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}
/* Scan an ordered list of case nodes
   combining those with consecutive values or ranges.

   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_nodes (head)
     case_node_ptr head;
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lb = next_real_insn (label_rtx (node->code_label));
      rtx lb2;
      case_node_ptr np = node;

      /* Try to group the successors of NODE with NODE.  */
      while (((np = np->right) != 0)
	     /* Do they jump to the same place?  */
	     && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
		 || (lb != 0 && lb2 != 0
		     && simplejump_p (lb)
		     && simplejump_p (lb2)
		     && rtx_equal_p (SET_SRC (PATTERN (lb)),
				     SET_SRC (PATTERN (lb2)))))
	     /* Are their ranges consecutive?  */
	     && tree_int_cst_equal (np->low,
				    fold (build (PLUS_EXPR,
						 TREE_TYPE (node->high),
						 node->high,
						 integer_one_node)))
	     /* An overflow is not consecutive.  */
	     && tree_int_cst_lt (node->high,
				 fold (build (PLUS_EXPR,
					      TREE_TYPE (node->high),
					      node->high,
					      integer_one_node))))
	{
	  node->high = np->high;
	}
      /* NP is the first node after NODE which can't be grouped with it.
	 Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}
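
/* Example of the grouping performed above: in

	switch (c) { case 1: case 2: case 3: return x; }

   the three labels jump to the same place and their values are
   consecutive, so the list collapses to the single range 1..3, which
   later costs one range check instead of three separate compares.  */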
/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  register case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      register case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += cost_table[TREE_INT_CST_LOW (np->high)];
	    }

	  if (use_cost_table)
	    cost += cost_table[TREE_INT_CST_LOW (np->low)];

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 Here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
		  i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 where ranges count as 2.
		 Here I gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}
/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one that the current node is bounded at its lower
   span.  Thus the test would be redundant.  */

static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
			       node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}
/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one that the current node is bounded at its upper
   span.  Thus the test would be redundant.  */

static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If there is no upper bound, obviously no test is needed.  */

  if (TYPE_MAX_VALUE (index_type) == NULL)
    return 1;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
			       node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}
/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  return (node_has_low_bound (node, index_type)
	  && node_has_high_bound (node, index_type));
}
/*  Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (label)
     rtx label;
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}
6025 /* Emit step-by-step code to select a case for the value of INDEX.
6026 The thus generated decision tree follows the form of the
6027 case-node binary tree NODE, whose nodes represent test conditions.
6028 INDEX_TYPE is the type of the index of the switch.
6030 Care is taken to prune redundant tests from the decision tree
6031 by detecting any boundary conditions already checked by
6032 emitted rtx. (See node_has_high_bound, node_has_low_bound
6033 and node_is_bounded, above.)
6035 Where the test conditions can be shown to be redundant we emit
6036 an unconditional jump to the target code. As a further
6037 optimization, the subordinates of a tree node are examined to
6038 check for bounded nodes. In this case conditional and/or
6039 unconditional jumps as a result of the boundary check for the
6040 current node are arranged to target the subordinates associated
6041 code for out of bound conditions on the current node.
6043 We can assume that when control reaches the code generated here,
6044 the index value has already been compared with the parents
6045 of this node, and determined to be on the same side of each parent
6046 as this node is. Thus, if this node tests for the value 51,
6047 and a parent tested for 52, we don't need to consider
6048 the possibility of a value greater than 51. If another parent
6049 tests for the value 50, then this node need not test anything. */
6052 emit_case_nodes (index
, node
, default_label
, index_type
)
6058 /* If INDEX has an unsigned type, we must make unsigned branches. */
6059 int unsignedp
= TREE_UNSIGNED (index_type
);
6060 enum machine_mode mode
= GET_MODE (index
);
6062 /* See if our parents have already tested everything for us.
6063 If they have, emit an unconditional jump for this node. */
6064 if (node_is_bounded (node
, index_type
))
6065 emit_jump (label_rtx (node
->code_label
));
6067 else if (tree_int_cst_equal (node
->low
, node
->high
))
6069 /* Node is single valued. First see if the index expression matches
6070 this node and then check our children, if any. */
6072 do_jump_if_equal (index
, expand_expr (node
->low
, NULL_RTX
, VOIDmode
, 0),
6073 label_rtx (node
->code_label
), unsignedp
);
6075 if (node
->right
!= 0 && node
->left
!= 0)
6077 /* This node has children on both sides.
6078 Dispatch to one side or the other
6079 by comparing the index value with this node's value.
6080 If one subtree is bounded, check that one first,
6081 so we can avoid real branches in the tree. */
6083 if (node_is_bounded (node
->right
, index_type
))
6085 emit_cmp_and_jump_insns (index
, expand_expr (node
->high
, NULL_RTX
,
6087 GT
, NULL_RTX
, mode
, unsignedp
, 0,
6088 label_rtx (node
->right
->code_label
));
6089 emit_case_nodes (index
, node
->left
, default_label
, index_type
);
6092 else if (node_is_bounded (node
->left
, index_type
))
6094 emit_cmp_and_jump_insns (index
, expand_expr (node
->high
, NULL_RTX
,
6096 LT
, NULL_RTX
, mode
, unsignedp
, 0,
6097 label_rtx (node
->left
->code_label
));
6098 emit_case_nodes (index
, node
->right
, default_label
, index_type
);
6103 /* Neither node is bounded. First distinguish the two sides;
6104 then emit the code for one side at a time. */
6107 = build_decl (LABEL_DECL
, NULL_TREE
, NULL_TREE
);
6109 /* See if the value is on the right. */
6110 emit_cmp_and_jump_insns (index
, expand_expr (node
->high
, NULL_RTX
,
6112 GT
, NULL_RTX
, mode
, unsignedp
, 0,
6113 label_rtx (test_label
));
6115 /* Value must be on the left.
6116 Handle the left-hand subtree. */
6117 emit_case_nodes (index
, node
->left
, default_label
, index_type
);
6118 /* If left-hand subtree does nothing,
6120 emit_jump_if_reachable (default_label
);
6122 /* Code branches here for the right-hand subtree. */
6123 expand_label (test_label
);
6124 emit_case_nodes (index
, node
->right
, default_label
, index_type
);
6128 else if (node
->right
!= 0 && node
->left
== 0)
6130 /* Here we have a right child but no left so we issue conditional
6131 branch to default and process the right child.
6133 Omit the conditional branch to default if we it avoid only one
6134 right child; it costs too much space to save so little time. */
6136 if (node
->right
->right
|| node
->right
->left
6137 || !tree_int_cst_equal (node
->right
->low
, node
->right
->high
))
6139 if (!node_has_low_bound (node
, index_type
))
6141 emit_cmp_and_jump_insns (index
, expand_expr (node
->high
,
6144 LT
, NULL_RTX
, mode
, unsignedp
, 0,
6148 emit_case_nodes (index
, node
->right
, default_label
, index_type
);
6151 /* We cannot process node->right normally
6152 since we haven't ruled out the numbers less than
6153 this node's value. So handle node->right explicitly. */
6154 do_jump_if_equal (index
,
6155 expand_expr (node
->right
->low
, NULL_RTX
,
6157 label_rtx (node
->right
->code_label
), unsignedp
);

      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
	 of the condition here, but they didn't work
	 and I don't understand what the idea was.  -- rms.  */
	  /* If our "most probable entry" is less probable
	     than the default label, emit a jump to
	     the default label using condition codes
	     already lying around.  With no right branch,
	     a branch-greater-than will get us to the default
	     label correctly.  */
	  if (use_cost_table
	      && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
	    ;
#endif /* 0 */

	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
		  emit_cmp_and_jump_insns (index, expand_expr (node->high,
							       NULL_RTX,
							       VOIDmode, 0),
					   GT, NULL_RTX, mode, unsignedp, 0,
					   default_label);
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers greater than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->left->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
							 VOIDmode, 0),
				     GT, NULL_RTX, mode, unsignedp, 0,
				     label_rtx (node->right->code_label));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
							   VOIDmode, 0),
				       GT, NULL_RTX, mode, unsignedp, 0,
				       label_rtx (test_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
						       VOIDmode, 0),
				   GE, NULL_RTX, mode, unsignedp, 0,
				   label_rtx (node->code_label));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If right node had to be handled later, do that now.  */

	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      emit_jump_if_reachable (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
							   VOIDmode, 0),
				       LT, NULL_RTX, mode, unsignedp, 0,
				       default_label);
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
						       VOIDmode, 0),
				   LE, NULL_RTX, mode, unsignedp, 0,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
							   VOIDmode, 0),
				       GT, NULL_RTX, mode, unsignedp, 0,
				       default_label);
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
						       VOIDmode, 0),
				   GE, NULL_RTX, mode, unsignedp, 0,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}

      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */

	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
							   VOIDmode, 0),
				       GT, NULL_RTX, mode, unsignedp, 0,
				       default_label);
	    }

	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
							   VOIDmode, 0),
				       LT, NULL_RTX, mode, unsignedp, 0,
				       default_label);
	    }

	  emit_jump (label_rtx (node->code_label));
	}
    }
}

/* These routines are used by the loop unrolling code.  They copy BLOCK trees
   so that the debugging info will be correct for the unrolled loop.  */

void
find_loop_tree_blocks ()
{
  identify_blocks (DECL_INITIAL (current_function_decl), get_insns ());
}

void
unroll_block_trees ()
{
  tree block = DECL_INITIAL (current_function_decl);

  reorder_blocks (block, get_insns ());
}