gcc/stmt.c (official-gcc.git)
1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
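/* For illustration, a front end might drive these entry points for a
   simple if/else roughly as follows (a sketch of the calling protocol
   only; expand_start_else is an assumption, it is not mentioned above):

       expand_start_cond (cond_expr, 0);
       ... expand RTL for the then-clause ...
       expand_start_else ();
       ... expand RTL for the else-clause ...
       expand_end_cond ();
*/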
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
41 #include "rtl.h"
42 #include "tree.h"
43 #include "tm_p.h"
44 #include "flags.h"
45 #include "except.h"
46 #include "function.h"
47 #include "insn-config.h"
48 #include "expr.h"
49 #include "libfuncs.h"
50 #include "hard-reg-set.h"
51 #include "loop.h"
52 #include "recog.h"
53 #include "machmode.h"
54 #include "toplev.h"
55 #include "output.h"
56 #include "ggc.h"
57 #include "langhooks.h"
58 #include "predict.h"
59 #include "optabs.h"
61 /* Assume that case vectors are not pc-relative. */
62 #ifndef CASE_VECTOR_PC_RELATIVE
63 #define CASE_VECTOR_PC_RELATIVE 0
64 #endif
66 /* Functions and data structures for expanding case statements. */
68 /* Case label structure, used to hold info on labels within case
69 statements. We handle "range" labels; for a single-value label
70 as in C, the high and low limits are the same.
72 An AVL tree of case nodes is initially created, and later transformed
73 to a list linked via the RIGHT fields in the nodes. Nodes with
74 higher case values are later in the list.
76 Switch statements can be output in one of two forms. A branch table
77 is used if there are more than a few labels and the labels are dense
78 within the range between the smallest and largest case value. If a
79 branch table is used, no further manipulations are done with the case
80 node chain.
82 The alternative to the use of a branch table is to generate a series
83 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
84 and PARENT fields to hold a binary tree. Initially the tree is
85 totally unbalanced, with everything on the right. We balance the tree
86 with nodes on the left having lower case values than the parent
87 and nodes on the right having higher values. We then output the tree
88 in order. */
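/* For example, a dense switch such as

       switch (c) { case 0: ... case 1: ... case 2: ... case 3: ... }

   is a good candidate for a branch table, while a sparse one such as

       switch (c) { case 1: ... case 1000: ... case 500000: ... }

   is emitted as a balanced tree of compare-and-jump insns, with the
   middle case value at the root and smaller/larger values in the left
   and right subtrees.  (Illustrative sketch only; the actual choice is
   made in expand_end_case using use_cost_table and
   estimate_case_costs.) */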
90 struct case_node GTY(())
92 struct case_node *left; /* Left son in binary tree */
93 struct case_node *right; /* Right son in binary tree; also node chain */
94 struct case_node *parent; /* Parent of node in binary tree */
95 tree low; /* Lowest index value for this label */
96 tree high; /* Highest index value for this label */
97 tree code_label; /* Label to jump to when node matches */
98 int balance;
101 typedef struct case_node case_node;
102 typedef struct case_node *case_node_ptr;
104 /* These are used by estimate_case_costs and balance_case_nodes. */
106 /* This must be a signed type, and non-ANSI compilers lack signed char. */
107 static short cost_table_[129];
108 static int use_cost_table;
109 static int cost_table_initialized;
111 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW
112 is unsigned. */
113 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
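/* For example, COST_TABLE (-1) refers to cost_table_[0] and
   COST_TABLE (127) to cost_table_[128], so the 129 entries cover the
   index values -1 through 127. */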
115 /* Stack of control and binding constructs we are currently inside.
117 These constructs begin when you call `expand_start_WHATEVER'
118 and end when you call `expand_end_WHATEVER'. This stack records
119 info about how the construct began that tells the end-function
120 what to do. It also may provide information about the construct
121 to alter the behavior of other constructs within the body.
122 For example, they may affect the behavior of C `break' and `continue'.
124 Each construct gets one `struct nesting' object.
125 All of these objects are chained through the `all' field.
126 `nesting_stack' points to the first object (innermost construct).
127 The position of an entry on `nesting_stack' is in its `depth' field.
129 Each type of construct has its own individual stack.
130 For example, loops have `loop_stack'. Each object points to the
131 next object of the same type through the `next' field.
133 Some constructs are visible to `break' exit-statements and others
134 are not. Which constructs are visible depends on the language.
135 Therefore, the data structure allows each construct to be visible
136 or not, according to the args given when the construct is started.
137 The construct is visible if the `exit_label' field is non-null.
138 In that case, the value should be a CODE_LABEL rtx. */
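/* For example (a sketch of the bookkeeping only, not emitted code):

       while (a)         ... expand_start_loop pushes onto loop_stack
         if (b)          ... expand_start_cond pushes onto cond_stack
           break;        ... exits the innermost construct whose
                             exit_label is non-null (here, the loop)

   Both entries are also chained through `all', so nesting_stack sees
   the cond entry first and the loop entry behind it. */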
140 struct nesting GTY(())
142 struct nesting *all;
143 struct nesting *next;
144 int depth;
145 rtx exit_label;
146 enum nesting_desc {
147 COND_NESTING,
148 LOOP_NESTING,
149 BLOCK_NESTING,
150 CASE_NESTING
151 } desc;
152 union nesting_u
154 /* For conds (if-then and if-then-else statements). */
155 struct nesting_cond
157 /* Label for the end of the if construct.
158 There is none if EXITFLAG was not set
159 and no `else' has been seen yet. */
160 rtx endif_label;
161 /* Label for the end of this alternative.
162 This may be the end of the if or the next else/elseif. */
163 rtx next_label;
164 } GTY ((tag ("COND_NESTING"))) cond;
165 /* For loops. */
166 struct nesting_loop
168 /* Label at the top of the loop; place to loop back to. */
169 rtx start_label;
170 /* Label at the end of the whole construct. */
171 rtx end_label;
172 /* Label for `continue' statement to jump to;
173 this is in front of the stepper of the loop. */
174 rtx continue_label;
175 } GTY ((tag ("LOOP_NESTING"))) loop;
176 /* For variable binding contours. */
177 struct nesting_block
179 /* Sequence number of this binding contour within the function,
180 in order of entry. */
181 int block_start_count;
182 /* Nonzero => value to restore stack to on exit. */
183 rtx stack_level;
184 /* The NOTE that starts this contour.
185 Used by expand_goto to check whether the destination
186 is within each contour or not. */
187 rtx first_insn;
188 /* Innermost containing binding contour that has a stack level. */
189 struct nesting *innermost_stack_block;
190 /* List of cleanups to be run on exit from this contour.
191 This is a list of expressions to be evaluated.
192 The TREE_PURPOSE of each link is the ..._DECL node
193 which the cleanup pertains to. */
194 tree cleanups;
195 /* List of cleanup-lists of blocks containing this block,
196 as they were at the locus where this block appears.
197 There is an element for each containing block,
198 ordered innermost containing block first.
199 The tail of this list can be 0,
200 if all remaining elements would be empty lists.
201 The element's TREE_VALUE is the cleanup-list of that block,
202 which may be null. */
203 tree outer_cleanups;
204 /* Chain of labels defined inside this binding contour.
205 For contours that have stack levels or cleanups. */
206 struct label_chain *label_chain;
207 /* Nonzero if this is associated with an EH region. */
208 int exception_region;
209 /* The saved target_temp_slot_level from our outer block.
210 We may reset target_temp_slot_level to be the level of
211 this block, if that is done, target_temp_slot_level
212 reverts to the saved target_temp_slot_level at the very
213 end of the block. */
214 int block_target_temp_slot_level;
215 /* True if we are currently emitting insns in an area of
216 output code that is controlled by a conditional
217 expression. This is used by the cleanup handling code to
218 generate conditional cleanup actions. */
219 int conditional_code;
220 /* A place to move the start of the exception region for any
221 of the conditional cleanups, must be at the end or after
222 the start of the last unconditional cleanup, and before any
223 conditional branch points. */
224 rtx last_unconditional_cleanup;
225 } GTY ((tag ("BLOCK_NESTING"))) block;
226 /* For switch (C) or case (Pascal) statements,
227 and also for dummies (see `expand_start_case_dummy'). */
228 struct nesting_case
230 /* The insn after which the case dispatch should finally
231 be emitted. Zero for a dummy. */
232 rtx start;
233 /* A list of case labels; it is first built as an AVL tree.
234 During expand_end_case, this is converted to a list, and may be
235 rearranged into a nearly balanced binary tree. */
236 struct case_node *case_list;
237 /* Label to jump to if no case matches. */
238 tree default_label;
239 /* The expression to be dispatched on. */
240 tree index_expr;
241 /* Type that INDEX_EXPR should be converted to. */
242 tree nominal_type;
243 /* Name of this kind of statement, for warnings. */
244 const char *printname;
245 /* Used to save no_line_numbers till we see the first case label.
246 We set this to -1 when we see the first case label in this
247 case statement. */
248 int line_number_status;
249 } GTY ((tag ("CASE_NESTING"))) case_stmt;
250 } GTY ((desc ("%1.desc"))) data;
253 /* Allocate and return a new `struct nesting'. */
255 #define ALLOC_NESTING() \
256 (struct nesting *) ggc_alloc (sizeof (struct nesting))
258 /* Pop the nesting stack element by element until we pop off
259 the element which is at the top of STACK.
260 Update all the other stacks, popping off elements from them
261 as we pop them from nesting_stack. */
263 #define POPSTACK(STACK) \
264 do { struct nesting *target = STACK; \
265 struct nesting *this; \
266 do { this = nesting_stack; \
267 if (loop_stack == this) \
268 loop_stack = loop_stack->next; \
269 if (cond_stack == this) \
270 cond_stack = cond_stack->next; \
271 if (block_stack == this) \
272 block_stack = block_stack->next; \
273 if (stack_block_stack == this) \
274 stack_block_stack = stack_block_stack->next; \
275 if (case_stack == this) \
276 case_stack = case_stack->next; \
277 nesting_depth = nesting_stack->depth - 1; \
278 nesting_stack = this->all; } \
279 while (this != target); } while (0)
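/* For example, the function that closes a loop can simply do
   POPSTACK (loop_stack); any conds or blocks opened inside the loop
   and still pending are popped from their own stacks on the way.
   (Illustrative; the callers of POPSTACK appear later in this file.) */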
281 /* In some cases it is impossible to generate code for a forward goto
282 until the label definition is seen. This happens when it may be necessary
283 for the goto to reset the stack pointer: we don't yet know how to do that.
284 So expand_goto puts an entry on this fixup list.
285 Each time a binding contour that resets the stack is exited,
286 we check each fixup.
287 If the target label has now been defined, we can insert the proper code. */
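/* A typical case needing a fixup (illustrative C source; assumes the
   inner block needs a stack level, e.g. because of a variable-length
   array):

       {
         char buf[n];
         if (g (buf))
           goto out;
         ...
       }
     out: ;

   The goto is expanded before `out' is defined, so expand_goto records
   a struct goto_fixup; when the block containing `buf' is exited,
   fixup_gotos inserts the stack restore (and any pending cleanups)
   just before the recorded jump. */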
289 struct goto_fixup GTY(())
291 /* Points to following fixup. */
292 struct goto_fixup *next;
293 /* Points to the insn before the jump insn.
294 If more code must be inserted, it goes after this insn. */
295 rtx before_jump;
296 /* The LABEL_DECL that this jump is jumping to, or 0
297 for break, continue or return. */
298 tree target;
299 /* The BLOCK for the place where this goto was found. */
300 tree context;
301 /* The CODE_LABEL rtx that this is jumping to. */
302 rtx target_rtl;
303 /* Number of binding contours started in current function
304 before the label reference. */
305 int block_start_count;
306 /* The outermost stack level that should be restored for this jump.
307 Each time a binding contour that resets the stack is exited,
308 if the target label is *not* yet defined, this slot is updated. */
309 rtx stack_level;
310 /* List of lists of cleanup expressions to be run by this goto.
311 There is one element for each block that this goto is within.
312 The tail of this list can be 0,
313 if all remaining elements would be empty.
314 The TREE_VALUE contains the cleanup list of that block as of the
315 time this goto was seen.
316 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
317 tree cleanup_list_list;
320 /* Within any binding contour that must restore a stack level,
321 all labels are recorded with a chain of these structures. */
323 struct label_chain GTY(())
325 /* Points to following fixup. */
326 struct label_chain *next;
327 tree label;
330 struct stmt_status GTY(())
332 /* Chain of all pending binding contours. */
333 struct nesting * x_block_stack;
335 /* If any new stacks are added here, add them to POPSTACKS too. */
337 /* Chain of all pending binding contours that restore stack levels
338 or have cleanups. */
339 struct nesting * x_stack_block_stack;
341 /* Chain of all pending conditional statements. */
342 struct nesting * x_cond_stack;
344 /* Chain of all pending loops. */
345 struct nesting * x_loop_stack;
347 /* Chain of all pending case or switch statements. */
348 struct nesting * x_case_stack;
350 /* Separate chain including all of the above,
351 chained through the `all' field. */
352 struct nesting * x_nesting_stack;
354 /* Number of entries on nesting_stack now. */
355 int x_nesting_depth;
357 /* Number of binding contours started so far in this function. */
358 int x_block_start_count;
360 /* Each time we expand an expression-statement,
361 record the expr's type and its RTL value here. */
362 tree x_last_expr_type;
363 rtx x_last_expr_value;
365 /* Nonzero if within a ({...}) grouping, in which case we must
366 always compute a value for each expr-stmt in case it is the last one. */
367 int x_expr_stmts_for_value;
369 /* Location of last line-number note, whether we actually
370 emitted it or not. */
371 location_t x_emit_locus;
373 struct goto_fixup *x_goto_fixup_chain;
376 #define block_stack (cfun->stmt->x_block_stack)
377 #define stack_block_stack (cfun->stmt->x_stack_block_stack)
378 #define cond_stack (cfun->stmt->x_cond_stack)
379 #define loop_stack (cfun->stmt->x_loop_stack)
380 #define case_stack (cfun->stmt->x_case_stack)
381 #define nesting_stack (cfun->stmt->x_nesting_stack)
382 #define nesting_depth (cfun->stmt->x_nesting_depth)
383 #define current_block_start_count (cfun->stmt->x_block_start_count)
384 #define last_expr_type (cfun->stmt->x_last_expr_type)
385 #define last_expr_value (cfun->stmt->x_last_expr_value)
386 #define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
387 #define emit_locus (cfun->stmt->x_emit_locus)
388 #define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
390 /* Nonzero if we are using EH to handle cleanups. */
391 static int using_eh_for_cleanups_p = 0;
393 static int n_occurrences PARAMS ((int, const char *));
394 static bool parse_input_constraint PARAMS ((const char **, int, int, int,
395 int, const char * const *,
396 bool *, bool *));
397 static bool decl_conflicts_with_clobbers_p PARAMS ((tree, const HARD_REG_SET));
398 static void expand_goto_internal PARAMS ((tree, rtx, rtx));
399 static int expand_fixup PARAMS ((tree, rtx, rtx));
400 static rtx expand_nl_handler_label PARAMS ((rtx, rtx));
401 static void expand_nl_goto_receiver PARAMS ((void));
402 static void expand_nl_goto_receivers PARAMS ((struct nesting *));
403 static void fixup_gotos PARAMS ((struct nesting *, rtx, tree,
404 rtx, int));
405 static bool check_operand_nalternatives PARAMS ((tree, tree));
406 static bool check_unique_operand_names PARAMS ((tree, tree));
407 static char *resolve_operand_name_1 PARAMS ((char *, tree, tree));
408 static void expand_null_return_1 PARAMS ((rtx));
409 static enum br_predictor return_prediction PARAMS ((rtx));
410 static void expand_value_return PARAMS ((rtx));
411 static int tail_recursion_args PARAMS ((tree, tree));
412 static void expand_cleanups PARAMS ((tree, int, int));
413 static void check_seenlabel PARAMS ((void));
414 static void do_jump_if_equal PARAMS ((rtx, rtx, rtx, int));
415 static int estimate_case_costs PARAMS ((case_node_ptr));
416 static bool same_case_target_p PARAMS ((rtx, rtx));
417 static void strip_default_case_nodes PARAMS ((case_node_ptr *, rtx));
418 static bool lshift_cheap_p PARAMS ((void));
419 static int case_bit_test_cmp PARAMS ((const void *, const void *));
420 static void emit_case_bit_tests PARAMS ((tree, tree, tree, tree,
421 case_node_ptr, rtx));
422 static void group_case_nodes PARAMS ((case_node_ptr));
423 static void balance_case_nodes PARAMS ((case_node_ptr *,
424 case_node_ptr));
425 static int node_has_low_bound PARAMS ((case_node_ptr, tree));
426 static int node_has_high_bound PARAMS ((case_node_ptr, tree));
427 static int node_is_bounded PARAMS ((case_node_ptr, tree));
428 static void emit_jump_if_reachable PARAMS ((rtx));
429 static void emit_case_nodes PARAMS ((rtx, case_node_ptr, rtx, tree));
430 static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
432 void
433 using_eh_for_cleanups ()
435 using_eh_for_cleanups_p = 1;
438 void
439 init_stmt_for_function ()
441 cfun->stmt = ((struct stmt_status *)ggc_alloc (sizeof (struct stmt_status)));
443 /* We are not currently within any block, conditional, loop or case. */
444 block_stack = 0;
445 stack_block_stack = 0;
446 loop_stack = 0;
447 case_stack = 0;
448 cond_stack = 0;
449 nesting_stack = 0;
450 nesting_depth = 0;
452 current_block_start_count = 0;
454 /* No gotos have been expanded yet. */
455 goto_fixup_chain = 0;
457 /* We are not processing a ({...}) grouping. */
458 expr_stmts_for_value = 0;
459 clear_last_expr ();
462 /* Record the current file and line. Called from emit_line_note. */
463 void
464 set_file_and_line_for_stmt (file, line)
465 const char *file;
466 int line;
468 /* If we're outputting an inline function, and we add a line note,
469 there may be no CFUN->STMT information. So, there's no need to
470 update it. */
471 if (cfun->stmt)
473 emit_locus.file = file;
474 emit_locus.line = line;
478 /* Emit a no-op instruction. */
480 void
481 emit_nop ()
483 rtx last_insn;
485 last_insn = get_last_insn ();
486 if (!optimize
487 && (GET_CODE (last_insn) == CODE_LABEL
488 || (GET_CODE (last_insn) == NOTE
489 && prev_real_insn (last_insn) == 0)))
490 emit_insn (gen_nop ());
493 /* Return the rtx-label that corresponds to a LABEL_DECL,
494 creating it if necessary. */
497 label_rtx (label)
498 tree label;
500 if (TREE_CODE (label) != LABEL_DECL)
501 abort ();
503 if (!DECL_RTL_SET_P (label))
504 SET_DECL_RTL (label, gen_label_rtx ());
506 return DECL_RTL (label);
509 /* As above, but also put it on the forced-reference list of the
510 function that contains it. */
512 force_label_rtx (label)
513 tree label;
515 rtx ref = label_rtx (label);
516 tree function = decl_function_context (label);
517 struct function *p;
519 if (!function)
520 abort ();
522 if (function != current_function_decl
523 && function != inline_function_decl)
524 p = find_function_data (function);
525 else
526 p = cfun;
528 p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
529 p->expr->x_forced_labels);
530 return ref;
533 /* Add an unconditional jump to LABEL as the next sequential instruction. */
535 void
536 emit_jump (label)
537 rtx label;
539 do_pending_stack_adjust ();
540 emit_jump_insn (gen_jump (label));
541 emit_barrier ();
544 /* Emit code to jump to the address
545 specified by the pointer expression EXP. */
547 void
548 expand_computed_goto (exp)
549 tree exp;
551 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
553 #ifdef POINTERS_EXTEND_UNSIGNED
554 if (GET_MODE (x) != Pmode)
555 x = convert_memory_address (Pmode, x);
556 #endif
558 emit_queue ();
560 if (! cfun->computed_goto_common_label)
562 cfun->computed_goto_common_reg = copy_to_mode_reg (Pmode, x);
563 cfun->computed_goto_common_label = gen_label_rtx ();
564 emit_label (cfun->computed_goto_common_label);
566 do_pending_stack_adjust ();
567 emit_indirect_jump (cfun->computed_goto_common_reg);
569 current_function_has_computed_jump = 1;
571 else
573 emit_move_insn (cfun->computed_goto_common_reg, x);
574 emit_jump (cfun->computed_goto_common_label);
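/* The source-level construct handled here is GNU C's computed goto,
   e.g. (illustrative):

       static void *tab[] = { &&l0, &&l1 };
       goto *tab[i];
     l0: ...
     l1: ...

   All computed gotos in a function are funneled through one common
   register and one common label (set up on the first one above), so
   the function ends up with a single indirect jump regardless of how
   many computed gotos it contains. */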
578 /* Handle goto statements and the labels that they can go to. */
580 /* Specify the location in the RTL code of a label LABEL,
581 which is a LABEL_DECL tree node.
583 This is used for the kind of label that the user can jump to with a
584 goto statement, and for alternatives of a switch or case statement.
585 RTL labels generated for loops and conditionals don't go through here;
586 they are generated directly at the RTL level, by other functions below.
588 Note that this has nothing to do with defining label *names*.
589 Languages vary in how they do that and what that even means. */
591 void
592 expand_label (label)
593 tree label;
595 struct label_chain *p;
597 do_pending_stack_adjust ();
598 emit_label (label_rtx (label));
599 if (DECL_NAME (label))
600 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
602 if (stack_block_stack != 0)
604 p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
605 p->next = stack_block_stack->data.block.label_chain;
606 stack_block_stack->data.block.label_chain = p;
607 p->label = label;
611 /* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
612 from nested functions. */
614 void
615 declare_nonlocal_label (label)
616 tree label;
618 rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
620 nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
621 LABEL_PRESERVE_P (label_rtx (label)) = 1;
622 if (nonlocal_goto_handler_slots == 0)
624 emit_stack_save (SAVE_NONLOCAL,
625 &nonlocal_goto_stack_level,
626 PREV_INSN (tail_recursion_reentry));
628 nonlocal_goto_handler_slots
629 = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
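/* The source-level construct this supports is a nonlocal goto from a
   nested function (illustrative GNU C; the label must be declared with
   __label__ to be visible in the nested function):

       void outer (void)
       {
         __label__ done;
         void inner (void) { goto done; }
         bar (inner);
       done: ;
       }

   Each such label gets a handler slot on the containing function's
   frame; expand_goto (below) uses that slot and the saved stack level
   to transfer control back into `outer'. */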
632 /* Generate RTL code for a `goto' statement with target label LABEL.
633 LABEL should be a LABEL_DECL tree node that was or will later be
634 defined with `expand_label'. */
636 void
637 expand_goto (label)
638 tree label;
640 tree context;
642 /* Check for a nonlocal goto to a containing function. */
643 context = decl_function_context (label);
644 if (context != 0 && context != current_function_decl)
646 struct function *p = find_function_data (context);
647 rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
648 rtx handler_slot, static_chain, save_area, insn;
649 tree link;
651 /* Find the corresponding handler slot for this label. */
652 handler_slot = p->x_nonlocal_goto_handler_slots;
653 for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
654 link = TREE_CHAIN (link))
655 handler_slot = XEXP (handler_slot, 1);
656 handler_slot = XEXP (handler_slot, 0);
658 p->has_nonlocal_label = 1;
659 current_function_has_nonlocal_goto = 1;
660 LABEL_REF_NONLOCAL_P (label_ref) = 1;
662 /* Copy the rtl for the slots so that they won't be shared in
663 case the virtual stack vars register gets instantiated differently
664 in the parent than in the child. */
666 static_chain = copy_to_reg (lookup_static_chain (label));
668 /* Get addr of containing function's current nonlocal goto handler,
669 which will do any cleanups and then jump to the label. */
670 handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
671 virtual_stack_vars_rtx,
672 static_chain));
674 /* Get addr of containing function's nonlocal save area. */
675 save_area = p->x_nonlocal_goto_stack_level;
676 if (save_area)
677 save_area = replace_rtx (copy_rtx (save_area),
678 virtual_stack_vars_rtx, static_chain);
680 #if HAVE_nonlocal_goto
681 if (HAVE_nonlocal_goto)
682 emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
683 save_area, label_ref));
684 else
685 #endif
687 /* Restore frame pointer for containing function.
688 This sets the actual hard register used for the frame pointer
689 to the location of the function's incoming static chain info.
690 The non-local goto handler will then adjust it to contain the
691 proper value and reload the argument pointer, if needed. */
692 emit_move_insn (hard_frame_pointer_rtx, static_chain);
693 emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);
695 /* USE of hard_frame_pointer_rtx added for consistency;
696 not clear if really needed. */
697 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
698 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
699 emit_indirect_jump (handler_slot);
702 /* Search backwards to the jump insn and mark it as a
703 non-local goto. */
704 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
706 if (GET_CODE (insn) == JUMP_INSN)
708 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
709 const0_rtx, REG_NOTES (insn));
710 break;
712 else if (GET_CODE (insn) == CALL_INSN)
713 break;
716 else
717 expand_goto_internal (label, label_rtx (label), NULL_RTX);
720 /* Generate RTL code for a `goto' statement with target label BODY.
721 LABEL should be a LABEL_REF.
722 LAST_INSN, if non-0, is the rtx we should consider as the last
723 insn emitted (for the purposes of cleaning up a return). */
725 static void
726 expand_goto_internal (body, label, last_insn)
727 tree body;
728 rtx label;
729 rtx last_insn;
731 struct nesting *block;
732 rtx stack_level = 0;
734 if (GET_CODE (label) != CODE_LABEL)
735 abort ();
737 /* If label has already been defined, we can tell now
738 whether and how we must alter the stack level. */
740 if (PREV_INSN (label) != 0)
742 /* Find the innermost pending block that contains the label.
743 (Check containment by comparing insn-uids.)
744 Then restore the outermost stack level within that block,
745 and do cleanups of all blocks contained in it. */
746 for (block = block_stack; block; block = block->next)
748 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
749 break;
750 if (block->data.block.stack_level != 0)
751 stack_level = block->data.block.stack_level;
752 /* Execute the cleanups for blocks we are exiting. */
753 if (block->data.block.cleanups != 0)
755 expand_cleanups (block->data.block.cleanups, 1, 1);
756 do_pending_stack_adjust ();
760 if (stack_level)
762 /* Ensure stack adjust isn't done by emit_jump, as this
763 would clobber the stack pointer. This one should be
764 deleted as dead by flow. */
765 clear_pending_stack_adjust ();
766 do_pending_stack_adjust ();
768 /* Don't do this adjust if it's to the end label and this function
769 is to return with a depressed stack pointer. */
770 if (label == return_label
771 && (((TREE_CODE (TREE_TYPE (current_function_decl))
772 == FUNCTION_TYPE)
773 && (TYPE_RETURNS_STACK_DEPRESSED
774 (TREE_TYPE (current_function_decl))))))
776 else
777 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
780 if (body != 0 && DECL_TOO_LATE (body))
781 error ("jump to `%s' invalidly jumps into binding contour",
782 IDENTIFIER_POINTER (DECL_NAME (body)));
784 /* Label not yet defined: may need to put this goto
785 on the fixup list. */
786 else if (! expand_fixup (body, label, last_insn))
788 /* No fixup needed. Record that the label is the target
789 of at least one goto that has no fixup. */
790 if (body != 0)
791 TREE_ADDRESSABLE (body) = 1;
794 emit_jump (label);
797 /* Generate if necessary a fixup for a goto
798 whose target label in tree structure (if any) is TREE_LABEL
799 and whose target in rtl is RTL_LABEL.
801 If LAST_INSN is nonzero, we pretend that the jump appears
802 after insn LAST_INSN instead of at the current point in the insn stream.
804 The fixup will be used later to insert insns just before the goto.
805 Those insns will restore the stack level as appropriate for the
806 target label, and will (in the case of C++) also invoke any object
807 destructors which have to be invoked when we exit the scopes which
808 are exited by the goto.
810 Value is nonzero if a fixup is made. */
812 static int
813 expand_fixup (tree_label, rtl_label, last_insn)
814 tree tree_label;
815 rtx rtl_label;
816 rtx last_insn;
818 struct nesting *block, *end_block;
820 /* See if we can recognize which block the label will be output in.
821 This is possible in some very common cases.
822 If we succeed, set END_BLOCK to that block.
823 Otherwise, set it to 0. */
825 if (cond_stack
826 && (rtl_label == cond_stack->data.cond.endif_label
827 || rtl_label == cond_stack->data.cond.next_label))
828 end_block = cond_stack;
829 /* If we are in a loop, recognize certain labels which
830 are likely targets. This reduces the number of fixups
831 we need to create. */
832 else if (loop_stack
833 && (rtl_label == loop_stack->data.loop.start_label
834 || rtl_label == loop_stack->data.loop.end_label
835 || rtl_label == loop_stack->data.loop.continue_label))
836 end_block = loop_stack;
837 else
838 end_block = 0;
840 /* Now set END_BLOCK to the binding level to which we will return. */
842 if (end_block)
844 struct nesting *next_block = end_block->all;
845 block = block_stack;
847 /* First see if the END_BLOCK is inside the innermost binding level.
848 If so, then no cleanups or stack levels are relevant. */
849 while (next_block && next_block != block)
850 next_block = next_block->all;
852 if (next_block)
853 return 0;
855 /* Otherwise, set END_BLOCK to the innermost binding level
856 which is outside the relevant control-structure nesting. */
857 next_block = block_stack->next;
858 for (block = block_stack; block != end_block; block = block->all)
859 if (block == next_block)
860 next_block = next_block->next;
861 end_block = next_block;
864 /* Does any containing block have a stack level or cleanups?
865 If not, no fixup is needed, and that is the normal case
866 (the only case, for standard C). */
867 for (block = block_stack; block != end_block; block = block->next)
868 if (block->data.block.stack_level != 0
869 || block->data.block.cleanups != 0)
870 break;
872 if (block != end_block)
874 /* Ok, a fixup is needed. Add a fixup to the list of such. */
875 struct goto_fixup *fixup
876 = (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
877 /* In case an old stack level is restored, make sure that comes
878 after any pending stack adjust. */
879 /* ?? If the fixup isn't to come at the present position,
880 doing the stack adjust here isn't useful. Doing it with our
881 settings at that location isn't useful either. Let's hope
882 someone does it! */
883 if (last_insn == 0)
884 do_pending_stack_adjust ();
885 fixup->target = tree_label;
886 fixup->target_rtl = rtl_label;
888 /* Create a BLOCK node and a corresponding matched set of
889 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
890 this point. The notes will encapsulate any and all fixup
891 code which we might later insert at this point in the insn
892 stream. Also, the BLOCK node will be the parent (i.e. the
893 `SUPERBLOCK') of any other BLOCK nodes which we might create
894 later on when we are expanding the fixup code.
896 Note that optimization passes (including expand_end_loop)
897 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
898 as a placeholder. */
901 rtx original_before_jump
902 = last_insn ? last_insn : get_last_insn ();
903 rtx start;
904 rtx end;
905 tree block;
907 block = make_node (BLOCK);
908 TREE_USED (block) = 1;
910 if (!cfun->x_whole_function_mode_p)
911 (*lang_hooks.decls.insert_block) (block);
912 else
914 BLOCK_CHAIN (block)
915 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
916 BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
917 = block;
920 start_sequence ();
921 start = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
922 if (cfun->x_whole_function_mode_p)
923 NOTE_BLOCK (start) = block;
924 fixup->before_jump = emit_note (NULL, NOTE_INSN_DELETED);
925 end = emit_note (NULL, NOTE_INSN_BLOCK_END);
926 if (cfun->x_whole_function_mode_p)
927 NOTE_BLOCK (end) = block;
928 fixup->context = block;
929 end_sequence ();
930 emit_insn_after (start, original_before_jump);
933 fixup->block_start_count = current_block_start_count;
934 fixup->stack_level = 0;
935 fixup->cleanup_list_list
936 = ((block->data.block.outer_cleanups
937 || block->data.block.cleanups)
938 ? tree_cons (NULL_TREE, block->data.block.cleanups,
939 block->data.block.outer_cleanups)
940 : 0);
941 fixup->next = goto_fixup_chain;
942 goto_fixup_chain = fixup;
945 return block != 0;
948 /* Expand any needed fixups in the outputmost binding level of the
949 function. FIRST_INSN is the first insn in the function. */
951 void
952 expand_fixups (first_insn)
953 rtx first_insn;
955 fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
958 /* When exiting a binding contour, process all pending gotos requiring fixups.
959 THISBLOCK is the structure that describes the block being exited.
960 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
961 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
962 FIRST_INSN is the insn that began this contour.
964 Gotos that jump out of this contour must restore the
965 stack level and do the cleanups before actually jumping.
967 DONT_JUMP_IN positive means report error if there is a jump into this
968 contour from before the beginning of the contour. This is also done if
969 STACK_LEVEL is nonzero unless DONT_JUMP_IN is negative. */
971 static void
972 fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
973 struct nesting *thisblock;
974 rtx stack_level;
975 tree cleanup_list;
976 rtx first_insn;
977 int dont_jump_in;
979 struct goto_fixup *f, *prev;
981 /* F is the fixup we are considering; PREV is the previous one. */
982 /* We run this loop in two passes so that cleanups of exited blocks
983 are run first, and blocks that are exited are marked so
984 afterwards. */
986 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
988 /* Test for a fixup that is inactive because it is already handled. */
989 if (f->before_jump == 0)
991 /* Delete inactive fixup from the chain, if that is easy to do. */
992 if (prev != 0)
993 prev->next = f->next;
995 /* Has this fixup's target label been defined?
996 If so, we can finalize it. */
997 else if (PREV_INSN (f->target_rtl) != 0)
999 rtx cleanup_insns;
1001 /* If this fixup jumped into this contour from before the beginning
1002 of this contour, report an error. This code used to use
1003 the first non-label insn after f->target_rtl, but that's
1004 wrong since such can be added, by things like put_var_into_stack
1005 and have INSN_UIDs that are out of the range of the block. */
1006 /* ??? Bug: this does not detect jumping in through intermediate
1007 blocks that have stack levels or cleanups.
1008 It detects only a problem with the innermost block
1009 around the label. */
1010 if (f->target != 0
1011 && (dont_jump_in > 0 || (dont_jump_in == 0 && stack_level)
1012 || cleanup_list)
1013 && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
1014 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
1015 && ! DECL_ERROR_ISSUED (f->target))
1017 error_with_decl (f->target,
1018 "label `%s' used before containing binding contour");
1019 /* Prevent multiple errors for one label. */
1020 DECL_ERROR_ISSUED (f->target) = 1;
1023 /* We will expand the cleanups into a sequence of their own and
1024 then later on we will attach this new sequence to the insn
1025 stream just ahead of the actual jump insn. */
1027 start_sequence ();
1029 /* Temporarily restore the lexical context where we will
1030 logically be inserting the fixup code. We do this for the
1031 sake of getting the debugging information right. */
1033 (*lang_hooks.decls.pushlevel) (0);
1034 (*lang_hooks.decls.set_block) (f->context);
1036 /* Expand the cleanups for blocks this jump exits. */
1037 if (f->cleanup_list_list)
1039 tree lists;
1040 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
1041 /* Marked elements correspond to blocks that have been closed.
1042 Do their cleanups. */
1043 if (TREE_ADDRESSABLE (lists)
1044 && TREE_VALUE (lists) != 0)
1046 expand_cleanups (TREE_VALUE (lists), 1, 1);
1047 /* Pop any pushes done in the cleanups,
1048 in case function is about to return. */
1049 do_pending_stack_adjust ();
1053 /* Restore stack level for the biggest contour that this
1054 jump jumps out of. */
1055 if (f->stack_level
1056 && ! (f->target_rtl == return_label
1057 && ((TREE_CODE (TREE_TYPE (current_function_decl))
1058 == FUNCTION_TYPE)
1059 && (TYPE_RETURNS_STACK_DEPRESSED
1060 (TREE_TYPE (current_function_decl))))))
1061 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
1063 /* Finish up the sequence containing the insns which implement the
1064 necessary cleanups, and then attach that whole sequence to the
1065 insn stream just ahead of the actual jump insn. Attaching it
1066 at that point ensures that any cleanups which are in fact
1067 implicit C++ object destructions (which must be executed upon
1068 leaving the block) appear (to the debugger) to be taking place
1069 in an area of the generated code where the object(s) being
1070 destructed are still "in scope". */
1072 cleanup_insns = get_insns ();
1073 (*lang_hooks.decls.poplevel) (1, 0, 0);
1075 end_sequence ();
1076 emit_insn_after (cleanup_insns, f->before_jump);
1078 f->before_jump = 0;
1082 /* For any still-undefined labels, do the cleanups for this block now.
1083 We must do this now since items in the cleanup list may go out
1084 of scope when the block ends. */
1085 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1086 if (f->before_jump != 0
1087 && PREV_INSN (f->target_rtl) == 0
1088 /* Label has still not appeared. If we are exiting a block with
1089 a stack level to restore, that started before the fixup,
1090 mark this stack level as needing restoration
1091 when the fixup is later finalized. */
1092 && thisblock != 0
1093 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
1094 means the label is undefined. That's erroneous, but possible. */
1095 && (thisblock->data.block.block_start_count
1096 <= f->block_start_count))
1098 tree lists = f->cleanup_list_list;
1099 rtx cleanup_insns;
1101 for (; lists; lists = TREE_CHAIN (lists))
1102 /* If the following elt. corresponds to our containing block
1103 then the elt. must be for this block. */
1104 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
1106 start_sequence ();
1107 (*lang_hooks.decls.pushlevel) (0);
1108 (*lang_hooks.decls.set_block) (f->context);
1109 expand_cleanups (TREE_VALUE (lists), 1, 1);
1110 do_pending_stack_adjust ();
1111 cleanup_insns = get_insns ();
1112 (*lang_hooks.decls.poplevel) (1, 0, 0);
1113 end_sequence ();
1114 if (cleanup_insns != 0)
1115 f->before_jump
1116 = emit_insn_after (cleanup_insns, f->before_jump);
1118 f->cleanup_list_list = TREE_CHAIN (lists);
1121 if (stack_level)
1122 f->stack_level = stack_level;
1126 /* Return the number of times character C occurs in string S. */
1127 static int
1128 n_occurrences (c, s)
1129 int c;
1130 const char *s;
1132 int n = 0;
1133 while (*s)
1134 n += (*s++ == c);
1135 return n;
1138 /* Generate RTL for an asm statement (explicit assembler code).
1139 STRING is a STRING_CST node containing the assembler code text,
1140 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
1141 insn is volatile; don't optimize it. */
1143 void
1144 expand_asm (string, vol)
1145 tree string;
1146 int vol;
1148 rtx body;
1150 if (TREE_CODE (string) == ADDR_EXPR)
1151 string = TREE_OPERAND (string, 0);
1153 body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));
1155 MEM_VOLATILE_P (body) = vol;
1157 emit_insn (body);
1159 clear_last_expr ();
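/* This handles the operand-less form only, e.g.

       asm volatile ("nop");

   The template string becomes an ASM_INPUT body and VOL (from the
   `volatile' keyword) sets MEM_VOLATILE_P so the optimizers leave the
   insn alone.  The extended form with operands is handled by
   expand_asm_operands below. */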
1162 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
1163 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
1164 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
1165 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
1166 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
1167 constraint allows the use of a register operand. And, *IS_INOUT
1168 will be true if the operand is read-write, i.e., if it is used as
1169 an input as well as an output. If *CONSTRAINT_P is not in
1170 canonical form, it will be made canonical. (Note that `+' will be
1171 replaced with `=' as part of this process.)
1173 Returns TRUE if all went well; FALSE if an error occurred. */
1175 bool
1176 parse_output_constraint (constraint_p, operand_num, ninputs, noutputs,
1177 allows_mem, allows_reg, is_inout)
1178 const char **constraint_p;
1179 int operand_num;
1180 int ninputs;
1181 int noutputs;
1182 bool *allows_mem;
1183 bool *allows_reg;
1184 bool *is_inout;
1186 const char *constraint = *constraint_p;
1187 const char *p;
1189 /* Assume the constraint doesn't allow the use of either a register
1190 or memory. */
1191 *allows_mem = false;
1192 *allows_reg = false;
1194 /* Allow the `=' or `+' to not be at the beginning of the string,
1195 since it wasn't explicitly documented that way, and there is a
1196 large body of code that puts it last. Swap the character to
1197 the front, so as not to uglify any place else. */
1198 p = strchr (constraint, '=');
1199 if (!p)
1200 p = strchr (constraint, '+');
1202 /* If the string doesn't contain an `=', issue an error
1203 message. */
1204 if (!p)
1206 error ("output operand constraint lacks `='");
1207 return false;
1210 /* If the constraint begins with `+', then the operand is both read
1211 from and written to. */
1212 *is_inout = (*p == '+');
1214 /* Canonicalize the output constraint so that it begins with `='. */
1215 if (p != constraint || is_inout)
1217 char *buf;
1218 size_t c_len = strlen (constraint);
1220 if (p != constraint)
1221 warning ("output constraint `%c' for operand %d is not at the beginning",
1222 *p, operand_num);
1224 /* Make a copy of the constraint. */
1225 buf = alloca (c_len + 1);
1226 strcpy (buf, constraint);
1227 /* Swap the first character and the `=' or `+'. */
1228 buf[p - constraint] = buf[0];
1229 /* Make sure the first character is an `='. (Until we do this,
1230 it might be a `+'.) */
1231 buf[0] = '=';
1232 /* Replace the constraint with the canonicalized string. */
1233 *constraint_p = ggc_alloc_string (buf, c_len);
1234 constraint = *constraint_p;
1237 /* Loop through the constraint string. */
1238 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
1239 switch (*p)
1241 case '+':
1242 case '=':
1243 error ("operand constraint contains incorrectly positioned '+' or '='");
1244 return false;
1246 case '%':
1247 if (operand_num + 1 == ninputs + noutputs)
1249 error ("`%%' constraint used with last operand");
1250 return false;
1252 break;
1254 case 'V': case 'm': case 'o':
1255 *allows_mem = true;
1256 break;
1258 case '?': case '!': case '*': case '&': case '#':
1259 case 'E': case 'F': case 'G': case 'H':
1260 case 's': case 'i': case 'n':
1261 case 'I': case 'J': case 'K': case 'L': case 'M':
1262 case 'N': case 'O': case 'P': case ',':
1263 break;
1265 case '0': case '1': case '2': case '3': case '4':
1266 case '5': case '6': case '7': case '8': case '9':
1267 case '[':
1268 error ("matching constraint not valid in output operand");
1269 return false;
1271 case '<': case '>':
1272 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1273 excepting those that expand_call created. So match memory
1274 and hope. */
1275 *allows_mem = true;
1276 break;
1278 case 'g': case 'X':
1279 *allows_reg = true;
1280 *allows_mem = true;
1281 break;
1283 case 'p': case 'r':
1284 *allows_reg = true;
1285 break;
1287 default:
1288 if (!ISALPHA (*p))
1289 break;
1290 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
1291 *allows_reg = true;
1292 #ifdef EXTRA_CONSTRAINT_STR
1293 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
1294 *allows_reg = true;
1295 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
1296 *allows_mem = true;
1297 else
1299 /* Otherwise we can't assume anything about the nature of
1300 the constraint except that it isn't purely registers.
1301 Treat it like "g" and hope for the best. */
1302 *allows_reg = true;
1303 *allows_mem = true;
1305 #endif
1306 break;
1309 return true;
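/* Illustrative use from the source level:

       int x, y;
       asm ("..." : "=r" (x), "+m" (y));

   For operand 0, "=r" sets *allows_reg and leaves *is_inout false.
   For operand 1, "+m" sets *allows_mem and *is_inout, and the
   constraint is canonicalized to "=m" (the `+' becomes `='); the read
   half is supplied later as a matching input operand by
   expand_asm_operands. */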
1312 /* Similar, but for input constraints. */
1314 static bool
1315 parse_input_constraint (constraint_p, input_num, ninputs, noutputs, ninout,
1316 constraints, allows_mem, allows_reg)
1317 const char **constraint_p;
1318 int input_num;
1319 int ninputs;
1320 int noutputs;
1321 int ninout;
1322 const char * const * constraints;
1323 bool *allows_mem;
1324 bool *allows_reg;
1326 const char *constraint = *constraint_p;
1327 const char *orig_constraint = constraint;
1328 size_t c_len = strlen (constraint);
1329 size_t j;
1331 /* Assume the constraint doesn't allow the use of either
1332 a register or memory. */
1333 *allows_mem = false;
1334 *allows_reg = false;
1336 /* Make sure constraint has neither `=', `+', nor '&'. */
1338 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
1339 switch (constraint[j])
1341 case '+': case '=': case '&':
1342 if (constraint == orig_constraint)
1344 error ("input operand constraint contains `%c'", constraint[j]);
1345 return false;
1347 break;
1349 case '%':
1350 if (constraint == orig_constraint
1351 && input_num + 1 == ninputs - ninout)
1353 error ("`%%' constraint used with last operand");
1354 return false;
1356 break;
1358 case 'V': case 'm': case 'o':
1359 *allows_mem = true;
1360 break;
1362 case '<': case '>':
1363 case '?': case '!': case '*': case '#':
1364 case 'E': case 'F': case 'G': case 'H':
1365 case 's': case 'i': case 'n':
1366 case 'I': case 'J': case 'K': case 'L': case 'M':
1367 case 'N': case 'O': case 'P': case ',':
1368 break;
1370 /* Whether or not a numeric constraint allows a register is
1371 decided by the matching constraint, and so there is no need
1372 to do anything special with them. We must handle them in
1373 the default case, so that we don't unnecessarily force
1374 operands to memory. */
1375 case '0': case '1': case '2': case '3': case '4':
1376 case '5': case '6': case '7': case '8': case '9':
1378 char *end;
1379 unsigned long match;
1381 match = strtoul (constraint + j, &end, 10);
1382 if (match >= (unsigned long) noutputs)
1384 error ("matching constraint references invalid operand number");
1385 return false;
1388 /* Try and find the real constraint for this dup. Only do this
1389 if the matching constraint is the only alternative. */
1390 if (*end == '\0'
1391 && (j == 0 || (j == 1 && constraint[0] == '%')))
1393 constraint = constraints[match];
1394 *constraint_p = constraint;
1395 c_len = strlen (constraint);
1396 j = 0;
1397 /* ??? At the end of the loop, we will skip the first part of
1398 the matched constraint. This assumes not only that the
1399 other constraint is an output constraint, but also that
1400 the '=' or '+' come first. */
1401 break;
1403 else
1404 j = end - constraint;
1405 /* Anticipate increment at end of loop. */
1406 j--;
1408 /* Fall through. */
1410 case 'p': case 'r':
1411 *allows_reg = true;
1412 break;
1414 case 'g': case 'X':
1415 *allows_reg = true;
1416 *allows_mem = true;
1417 break;
1419 default:
1420 if (! ISALPHA (constraint[j]))
1422 error ("invalid punctuation `%c' in constraint", constraint[j]);
1423 return false;
1425 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
1426 != NO_REGS)
1427 *allows_reg = true;
1428 #ifdef EXTRA_CONSTRAINT_STR
1429 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
1430 *allows_reg = true;
1431 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
1432 *allows_mem = true;
1433 else
1435 /* Otherwise we can't assume anything about the nature of
1436 the constraint except that it isn't purely registers.
1437 Treat it like "g" and hope for the best. */
1438 *allows_reg = true;
1439 *allows_mem = true;
1441 #endif
1442 break;
1445 return true;
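/* Illustrative input constraints:

       asm ("..." : "=r" (out) : "r" (a), "0" (b), "m" (c));

   "r" sets *allows_reg and "m" sets *allows_mem.  The matching
   constraint "0" is resolved by substituting constraints[0] ("=r"
   here), so operand b picks up that operand's register/memory
   properties instead of being forced to memory. */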
1448 /* Check for overlap between registers marked in CLOBBERED_REGS and
1449 anything inappropriate in DECL. Emit error and return TRUE for error,
1450 FALSE for ok. */
1452 static bool
1453 decl_conflicts_with_clobbers_p (decl, clobbered_regs)
1454 tree decl;
1455 const HARD_REG_SET clobbered_regs;
1457 /* Conflicts between asm-declared register variables and the clobber
1458 list are not allowed. */
1459 if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
1460 && DECL_REGISTER (decl)
1461 && REG_P (DECL_RTL (decl))
1462 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
1464 rtx reg = DECL_RTL (decl);
1465 unsigned int regno;
1467 for (regno = REGNO (reg);
1468 regno < (REGNO (reg)
1469 + HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)));
1470 regno++)
1471 if (TEST_HARD_REG_BIT (clobbered_regs, regno))
1473 error ("asm-specifier for variable `%s' conflicts with asm clobber list",
1474 IDENTIFIER_POINTER (DECL_NAME (decl)));
1476 /* Reset registerness to stop multiple errors emitted for a
1477 single variable. */
1478 DECL_REGISTER (decl) = 0;
1479 return true;
1482 return false;
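/* The situation this rejects looks like (illustrative, i386-style
   register names):

       register int t asm ("ebx");
       asm ("..." : "=r" (x) : "r" (t) : "ebx");

   Here `t' lives in a hard register that is also named in the clobber
   list; an error is emitted and DECL_REGISTER is cleared so the same
   variable does not trigger the diagnostic repeatedly. */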
1485 /* Generate RTL for an asm statement with arguments.
1486 STRING is the instruction template.
1487 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1488 Each output or input has an expression in the TREE_VALUE and
1489 a tree list in TREE_PURPOSE which in turn contains a constraint
1490 name in TREE_VALUE (or NULL_TREE) and a constraint string
1491 in TREE_PURPOSE.
1492 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1493 that is clobbered by this insn.
1495 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1496 Some elements of OUTPUTS may be replaced with trees representing temporary
1497 values. The caller should copy those temporary values to the originally
1498 specified lvalues.
1500 VOL nonzero means the insn is volatile; don't optimize it. */
1502 void
1503 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1504 tree string, outputs, inputs, clobbers;
1505 int vol;
1506 const char *filename;
1507 int line;
1509 rtvec argvec, constraintvec;
1510 rtx body;
1511 int ninputs = list_length (inputs);
1512 int noutputs = list_length (outputs);
1513 int ninout;
1514 int nclobbers;
1515 HARD_REG_SET clobbered_regs;
1516 int clobber_conflict_found = 0;
1517 tree tail;
1518 tree t;
1519 int i;
1520 /* Vector of RTX's of evaluated output operands. */
1521 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1522 int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
1523 rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1524 enum machine_mode *inout_mode
1525 = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
1526 const char **constraints
1527 = (const char **) alloca ((noutputs + ninputs) * sizeof (const char *));
1528 int old_generating_concat_p = generating_concat_p;
1530 /* An ASM with no outputs needs to be treated as volatile, for now. */
1531 if (noutputs == 0)
1532 vol = 1;
1534 if (! check_operand_nalternatives (outputs, inputs))
1535 return;
1537 if (! check_unique_operand_names (outputs, inputs))
1538 return;
1540 string = resolve_asm_operand_names (string, outputs, inputs);
1542 /* Collect constraints. */
1543 i = 0;
1544 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
1545 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1546 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
1547 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1549 #ifdef MD_ASM_CLOBBERS
1550 /* Sometimes we wish to automatically clobber registers across an asm.
1551 Case in point is when the i386 backend moved from cc0 to a hard reg --
1552 maintaining source-level compatibility means automatically clobbering
1553 the flags register. */
1554 MD_ASM_CLOBBERS (clobbers);
1555 #endif
1557 /* Count the number of meaningful clobbered registers, ignoring what
1558 we would ignore later. */
1559 nclobbers = 0;
1560 CLEAR_HARD_REG_SET (clobbered_regs);
1561 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1563 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1565 i = decode_reg_name (regname);
1566 if (i >= 0 || i == -4)
1567 ++nclobbers;
1568 else if (i == -2)
1569 error ("unknown register name `%s' in `asm'", regname);
1571 /* Mark clobbered registers. */
1572 if (i >= 0)
1574 /* Clobbering the PIC register is an error */
1575 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
1577 error ("PIC register `%s' clobbered in `asm'", regname);
1578 return;
1581 SET_HARD_REG_BIT (clobbered_regs, i);
1585 clear_last_expr ();
1587 /* First pass over inputs and outputs checks validity and sets
1588 mark_addressable if needed. */
1590 ninout = 0;
1591 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1593 tree val = TREE_VALUE (tail);
1594 tree type = TREE_TYPE (val);
1595 const char *constraint;
1596 bool is_inout;
1597 bool allows_reg;
1598 bool allows_mem;
1600 /* If there's an erroneous arg, emit no insn. */
1601 if (type == error_mark_node)
1602 return;
1604 /* Try to parse the output constraint. If that fails, there's
1605 no point in going further. */
1606 constraint = constraints[i];
1607 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
1608 &allows_mem, &allows_reg, &is_inout))
1609 return;
1611 if (! allows_reg
1612 && (allows_mem
1613 || is_inout
1614 || (DECL_P (val)
1615 && GET_CODE (DECL_RTL (val)) == REG
1616 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
1617 (*lang_hooks.mark_addressable) (val);
1619 if (is_inout)
1620 ninout++;
1623 ninputs += ninout;
1624 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1626 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1627 return;
1630 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
1632 bool allows_reg, allows_mem;
1633 const char *constraint;
1635 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
1636 would get VOIDmode and that could cause a crash in reload. */
1637 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1638 return;
1640 constraint = constraints[i + noutputs];
1641 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1642 constraints, &allows_mem, &allows_reg))
1643 return;
1645 if (! allows_reg && allows_mem)
1646 (*lang_hooks.mark_addressable) (TREE_VALUE (tail));
1649 /* Second pass evaluates arguments. */
1651 ninout = 0;
1652 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1654 tree val = TREE_VALUE (tail);
1655 tree type = TREE_TYPE (val);
1656 bool is_inout;
1657 bool allows_reg;
1658 bool allows_mem;
1659 rtx op;
1661 if (!parse_output_constraint (&constraints[i], i, ninputs,
1662 noutputs, &allows_mem, &allows_reg,
1663 &is_inout))
1664 abort ();
1666 /* If an output operand is not a decl or indirect ref and our constraint
1667 allows a register, make a temporary to act as an intermediate.
1668 Make the asm insn write into that, then our caller will copy it to
1669 the real output operand. Likewise for promoted variables. */
1671 generating_concat_p = 0;
1673 real_output_rtx[i] = NULL_RTX;
1674 if ((TREE_CODE (val) == INDIRECT_REF
1675 && allows_mem)
1676 || (DECL_P (val)
1677 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
1678 && ! (GET_CODE (DECL_RTL (val)) == REG
1679 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1680 || ! allows_reg
1681 || is_inout)
1683 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
1684 if (GET_CODE (op) == MEM)
1685 op = validize_mem (op);
1687 if (! allows_reg && GET_CODE (op) != MEM)
1688 error ("output number %d not directly addressable", i);
1689 if ((! allows_mem && GET_CODE (op) == MEM)
1690 || GET_CODE (op) == CONCAT)
1692 real_output_rtx[i] = protect_from_queue (op, 1);
1693 op = gen_reg_rtx (GET_MODE (op));
1694 if (is_inout)
1695 emit_move_insn (op, real_output_rtx[i]);
1698 else
1700 op = assign_temp (type, 0, 0, 1);
1701 op = validize_mem (op);
1702 TREE_VALUE (tail) = make_tree (type, op);
1704 output_rtx[i] = op;
1706 generating_concat_p = old_generating_concat_p;
1708 if (is_inout)
1710 inout_mode[ninout] = TYPE_MODE (type);
1711 inout_opnum[ninout++] = i;
1714 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1715 clobber_conflict_found = 1;
1718 /* Make vectors for the expression-rtx, constraint strings,
1719 and named operands. */
1721 argvec = rtvec_alloc (ninputs);
1722 constraintvec = rtvec_alloc (ninputs);
1724 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1725 : GET_MODE (output_rtx[0])),
1726 TREE_STRING_POINTER (string),
1727 empty_string, 0, argvec, constraintvec,
1728 filename, line);
1730 MEM_VOLATILE_P (body) = vol;
1732 /* Eval the inputs and put them into ARGVEC.
1733 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1735 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1737 bool allows_reg, allows_mem;
1738 const char *constraint;
1739 tree val, type;
1740 rtx op;
1742 constraint = constraints[i + noutputs];
1743 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1744 constraints, &allows_mem, &allows_reg))
1745 abort ();
1747 generating_concat_p = 0;
1749 val = TREE_VALUE (tail);
1750 type = TREE_TYPE (val);
1751 op = expand_expr (val, NULL_RTX, VOIDmode,
1752 (allows_mem && !allows_reg
1753 ? EXPAND_MEMORY : EXPAND_NORMAL));
1755 /* Never pass a CONCAT to an ASM. */
1756 if (GET_CODE (op) == CONCAT)
1757 op = force_reg (GET_MODE (op), op);
1758 else if (GET_CODE (op) == MEM)
1759 op = validize_mem (op);
1761 if (asm_operand_ok (op, constraint) <= 0)
1763 if (allows_reg)
1764 op = force_reg (TYPE_MODE (type), op);
1765 else if (!allows_mem)
1766 warning ("asm operand %d probably doesn't match constraints",
1767 i + noutputs);
1768 else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
1770 /* We won't recognize volatile memory as a valid
1771 memory_operand at this point. Ignore it. */
1773 else
1775 warning ("use of memory input without lvalue in "
1776 "asm operand %d is deprecated", i + noutputs);
1778 if (CONSTANT_P (op))
1780 op = force_const_mem (TYPE_MODE (type), op);
1781 op = validize_mem (op);
1783 else if (GET_CODE (op) == REG
1784 || GET_CODE (op) == SUBREG
1785 || GET_CODE (op) == ADDRESSOF
1786 || GET_CODE (op) == CONCAT)
1788 tree qual_type = build_qualified_type (type,
1789 (TYPE_QUALS (type)
1790 | TYPE_QUAL_CONST));
1791 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1792 memloc = validize_mem (memloc);
1793 emit_move_insn (memloc, op);
1794 op = memloc;
1799 generating_concat_p = old_generating_concat_p;
1800 ASM_OPERANDS_INPUT (body, i) = op;
1802 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1803 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1805 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1806 clobber_conflict_found = 1;
1809 /* Protect all the operands from the queue now that they have all been
1810 evaluated. */
1812 generating_concat_p = 0;
1814 for (i = 0; i < ninputs - ninout; i++)
1815 ASM_OPERANDS_INPUT (body, i)
1816 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1818 for (i = 0; i < noutputs; i++)
1819 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1821 /* For in-out operands, copy output rtx to input rtx. */
1822 for (i = 0; i < ninout; i++)
1824 int j = inout_opnum[i];
1825 char buffer[16];
1827 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1828 = output_rtx[j];
1830 sprintf (buffer, "%d", j);
1831 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1832 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_alloc_string (buffer, -1));
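/* For example (variable name illustrative), an in-out operand written as
     asm ("incl %0" : "+r" (x));
   is counted in ninout, and the loop above has just given it a hidden
   matching input whose constraint text is the decimal number of the
   corresponding output ("0" here), tying both to the same location.  */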
1835 generating_concat_p = old_generating_concat_p;
1837 /* Now, for each output, construct an rtx
1838 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1839 ARGVEC CONSTRAINTS OPNAMES))
1840 If there is more than one, put them inside a PARALLEL. */
1842 if (noutputs == 1 && nclobbers == 0)
1844 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1845 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1848 else if (noutputs == 0 && nclobbers == 0)
1850 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1851 emit_insn (body);
1854 else
1856 rtx obody = body;
1857 int num = noutputs;
1859 if (num == 0)
1860 num = 1;
1862 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1864 /* For each output operand, store a SET. */
1865 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1867 XVECEXP (body, 0, i)
1868 = gen_rtx_SET (VOIDmode,
1869 output_rtx[i],
1870 gen_rtx_ASM_OPERANDS
1871 (GET_MODE (output_rtx[i]),
1872 TREE_STRING_POINTER (string),
1873 constraints[i], i, argvec, constraintvec,
1874 filename, line));
1876 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1879 /* If there are no outputs (but there are some clobbers)
1880 store the bare ASM_OPERANDS into the PARALLEL. */
1882 if (i == 0)
1883 XVECEXP (body, 0, i++) = obody;
1885 /* Store (clobber REG) for each clobbered register specified. */
1887 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1889 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1890 int j = decode_reg_name (regname);
1891 rtx clobbered_reg;
1893 if (j < 0)
1895 if (j == -3) /* `cc', which is not a register */
1896 continue;
1898 if (j == -4) /* `memory', don't cache memory across asm */
1900 XVECEXP (body, 0, i++)
1901 = gen_rtx_CLOBBER (VOIDmode,
1902 gen_rtx_MEM
1903 (BLKmode,
1904 gen_rtx_SCRATCH (VOIDmode)));
1905 continue;
1908 /* Ignore unknown register, error already signaled. */
1909 continue;
1912 /* Use QImode since that's guaranteed to clobber just one reg. */
1913 clobbered_reg = gen_rtx_REG (QImode, j);
1915 /* Do sanity check for overlap between clobbers and respectively
1916 input and outputs that hasn't been handled. Such overlap
1917 should have been detected and reported above. */
1918 if (!clobber_conflict_found)
1920 int opno;
1922 /* We test the old body (obody) contents to avoid tripping
1923 over the under-construction body. */
1924 for (opno = 0; opno < noutputs; opno++)
1925 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1926 internal_error ("asm clobber conflict with output operand");
1928 for (opno = 0; opno < ninputs - ninout; opno++)
1929 if (reg_overlap_mentioned_p (clobbered_reg,
1930 ASM_OPERANDS_INPUT (obody, opno)))
1931 internal_error ("asm clobber conflict with input operand");
1934 XVECEXP (body, 0, i++)
1935 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1938 emit_insn (body);
1941 /* For any outputs that needed reloading into registers, spill them
1942 back to where they belong. */
1943 for (i = 0; i < noutputs; ++i)
1944 if (real_output_rtx[i])
1945 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1947 free_temp_slots ();
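/* A sketch of what the code above handles, with illustrative names:

     asm volatile ("foo %1, %0" : "=r" (out) : "r" (in) : "cc", "memory");

   yields one SET of the output from an ASM_OPERANDS, plus a
   (clobber (mem:BLK (scratch))) for "memory", all inside one PARALLEL
   because a clobber is present; "cc" typically decodes to the special
   -3 value and is simply skipped by the clobber loop.  */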
1950 /* A subroutine of expand_asm_operands. Check that all operands have
1951 the same number of alternatives. Return true if so. */
1953 static bool
1954 check_operand_nalternatives (outputs, inputs)
1955 tree outputs, inputs;
1957 if (outputs || inputs)
1959 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1960 int nalternatives
1961 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1962 tree next = inputs;
1964 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1966 error ("too many alternatives in `asm'");
1967 return false;
1970 tmp = outputs;
1971 while (tmp)
1973 const char *constraint
1974 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1976 if (n_occurrences (',', constraint) != nalternatives)
1978 error ("operand constraints for `asm' differ in number of alternatives");
1979 return false;
1982 if (TREE_CHAIN (tmp))
1983 tmp = TREE_CHAIN (tmp);
1984 else
1985 tmp = next, next = 0;
1989 return true;
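/* For instance, an output constraint of "=r,m" paired with an input
   constraint of "r,r" is fine (one ',' in each, i.e. two alternatives),
   while pairing "=r,m" with a plain "r" trips the error above.  */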
1992 /* A subroutine of expand_asm_operands. Check that all operand names
1993 are unique. Return true if so. We rely on the fact that these names
1994 are identifiers, and so have been canonicalized by get_identifier,
1995 so all we need are pointer comparisons. */
1997 static bool
1998 check_unique_operand_names (outputs, inputs)
1999 tree outputs, inputs;
2001 tree i, j;
2003 for (i = outputs; i ; i = TREE_CHAIN (i))
2005 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
2006 if (! i_name)
2007 continue;
2009 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
2010 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
2011 goto failure;
2014 for (i = inputs; i ; i = TREE_CHAIN (i))
2016 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
2017 if (! i_name)
2018 continue;
2020 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
2021 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
2022 goto failure;
2023 for (j = outputs; j ; j = TREE_CHAIN (j))
2024 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
2025 goto failure;
2028 return true;
2030 failure:
2031 error ("duplicate asm operand name '%s'",
2032 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
2033 return false;
2036 /* A subroutine of expand_asm_operands. Resolve the names of the operands
2037 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
2038 STRING and in the constraints to those numbers. */
2040 tree
2041 resolve_asm_operand_names (tree string, tree outputs, tree inputs)
2043 char *buffer;
2044 char *p;
2045 tree t;
2047 /* Substitute [<name>] in input constraint strings. There should be no
2048 named operands in output constraints. */
2049 for (t = inputs; t ; t = TREE_CHAIN (t))
2051 const char *c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2052 if (strchr (c, '[') != NULL)
2054 p = buffer = xstrdup (c);
2055 while ((p = strchr (p, '[')) != NULL)
2056 p = resolve_operand_name_1 (p, outputs, inputs);
2057 TREE_VALUE (TREE_PURPOSE (t))
2058 = build_string (strlen (buffer), buffer);
2059 free (buffer);
2063 if (strchr (TREE_STRING_POINTER (string), '[') == NULL)
2064 return string;
2066 /* Assume that we will not need extra space to perform the substitution.
2067 This is because we get to remove '[' and ']', which means we cannot have
2068 a problem until we have more than 999 operands. */
2070 p = buffer = xstrdup (TREE_STRING_POINTER (string));
2071 while ((p = strchr (p, '%')) != NULL)
2073 if (p[1] == '[')
2074 p += 1;
2075 else if (ISALPHA (p[1]) && p[2] == '[')
2076 p += 2;
2077 else
2079 p += 1;
2080 continue;
2083 p = resolve_operand_name_1 (p, outputs, inputs);
2086 string = build_string (strlen (buffer), buffer);
2087 free (buffer);
2089 return string;
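/* Illustrative sketch (operand names made up):

     asm ("addl %[delta], %[sum]" : [sum] "+r" (s) : [delta] "ir" (d));

   The constraint strings contain no '[' and are left alone, while the
   template is rewritten to "addl %1, %0".  An input constraint written
   as "[sum]" would likewise become the matching constraint "0"; since
   the bracketed name is longer than the number, the buffer only ever
   shrinks.  */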
2092 /* A subroutine of resolve_operand_names. P points to the '[' for a
2093 potential named operand of the form [<name>]. In place, replace
2094 the name and brackets with a number. Return a pointer to the
2095 balance of the string after substitution. */
2097 static char *
2098 resolve_operand_name_1 (p, outputs, inputs)
2099 char *p;
2100 tree outputs, inputs;
2102 char *q;
2103 int op;
2104 tree t;
2105 size_t len;
2107 /* Collect the operand name. */
2108 q = strchr (p, ']');
2109 if (!q)
2111 error ("missing close brace for named operand");
2112 return strchr (p, '\0');
2114 len = q - p - 1;
2116 /* Resolve the name to a number. */
2117 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2119 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2120 if (name)
2122 const char *c = TREE_STRING_POINTER (name);
2123 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2124 goto found;
2127 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2129 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2130 if (name)
2132 const char *c = TREE_STRING_POINTER (name);
2133 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2134 goto found;
2138 *q = '\0';
2139 error ("undefined named operand '%s'", p + 1);
2140 op = 0;
2141 found:
2143 /* Replace the name with the number. Unfortunately, not all libraries
2144 get the return value of sprintf correct, so search for the end of the
2145 generated string by hand. */
2146 sprintf (p, "%d", op);
2147 p = strchr (p, '\0');
2149 /* Verify the no extra buffer space assumption. */
2150 if (p > q)
2151 abort ();
2153 /* Shift the rest of the buffer down to fill the gap. */
2154 memmove (p, q + 1, strlen (q + 1) + 1);
2156 return p;
2159 /* Generate RTL to evaluate the expression EXP
2160 and remember it in case this is the VALUE in a ({... VALUE; }) constr.
2161 Provided just for backward-compatibility. expand_expr_stmt_value()
2162 should be used for new code. */
2164 void
2165 expand_expr_stmt (exp)
2166 tree exp;
2168 expand_expr_stmt_value (exp, -1, 1);
2171 /* Generate RTL to evaluate the expression EXP. WANT_VALUE tells
2172 whether to (1) save the value of the expression, (0) discard it or
2173 (-1) use expr_stmts_for_value to tell. The use of -1 is
2174 deprecated, and retained only for backward compatibility. */
2176 void
2177 expand_expr_stmt_value (exp, want_value, maybe_last)
2178 tree exp;
2179 int want_value, maybe_last;
2181 rtx value;
2182 tree type;
2184 if (want_value == -1)
2185 want_value = expr_stmts_for_value != 0;
2187 /* If -Wextra, warn about statements with no side effects,
2188 except for an explicit cast to void (e.g. for assert()), and
2189 except for last statement in ({...}) where they may be useful. */
2190 if (! want_value
2191 && (expr_stmts_for_value == 0 || ! maybe_last)
2192 && exp != error_mark_node)
2194 if (! TREE_SIDE_EFFECTS (exp))
2196 if (warn_unused_value
2197 && !(TREE_CODE (exp) == CONVERT_EXPR
2198 && VOID_TYPE_P (TREE_TYPE (exp))))
2199 warning ("%Hstatement with no effect", &emit_locus);
2201 else if (warn_unused_value)
2202 warn_if_unused_value (exp);
2205 /* If EXP is of function type and we are expanding statements for
2206 value, convert it to pointer-to-function. */
2207 if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2208 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2210 /* The call to `expand_expr' could cause last_expr_type and
2211 last_expr_value to get reset. Therefore, we set last_expr_value
2212 and last_expr_type *after* calling expand_expr. */
2213 value = expand_expr (exp, want_value ? NULL_RTX : const0_rtx,
2214 VOIDmode, 0);
2215 type = TREE_TYPE (exp);
2217 /* If all we do is reference a volatile value in memory,
2218 copy it to a register to be sure it is actually touched. */
2219 if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
2221 if (TYPE_MODE (type) == VOIDmode)
2223 else if (TYPE_MODE (type) != BLKmode)
2224 value = copy_to_reg (value);
2225 else
2227 rtx lab = gen_label_rtx ();
2229 /* Compare the value with itself to reference it. */
2230 emit_cmp_and_jump_insns (value, value, EQ,
2231 expand_expr (TYPE_SIZE (type),
2232 NULL_RTX, VOIDmode, 0),
2233 BLKmode, 0, lab);
2234 emit_label (lab);
2238 /* If this expression is part of a ({...}) and is in memory, we may have
2239 to preserve temporaries. */
2240 preserve_temp_slots (value);
2242 /* Free any temporaries used to evaluate this expression. Any temporary
2243 used as a result of this expression will already have been preserved
2244 above. */
2245 free_temp_slots ();
2247 if (want_value)
2249 last_expr_value = value;
2250 last_expr_type = type;
2253 emit_queue ();
2256 /* Warn if EXP contains any computations whose results are not used.
2257 Return 1 if a warning is printed; 0 otherwise. */
2260 warn_if_unused_value (exp)
2261 tree exp;
2263 if (TREE_USED (exp))
2264 return 0;
2266 /* Don't warn about void constructs. This includes casting to void,
2267 void function calls, and statement expressions with a final cast
2268 to void. */
2269 if (VOID_TYPE_P (TREE_TYPE (exp)))
2270 return 0;
2272 switch (TREE_CODE (exp))
2274 case PREINCREMENT_EXPR:
2275 case POSTINCREMENT_EXPR:
2276 case PREDECREMENT_EXPR:
2277 case POSTDECREMENT_EXPR:
2278 case MODIFY_EXPR:
2279 case INIT_EXPR:
2280 case TARGET_EXPR:
2281 case CALL_EXPR:
2282 case METHOD_CALL_EXPR:
2283 case RTL_EXPR:
2284 case TRY_CATCH_EXPR:
2285 case WITH_CLEANUP_EXPR:
2286 case EXIT_EXPR:
2287 return 0;
2289 case BIND_EXPR:
2290 /* For a binding, warn if no side effect within it. */
2291 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2293 case SAVE_EXPR:
2294 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2296 case TRUTH_ORIF_EXPR:
2297 case TRUTH_ANDIF_EXPR:
2298 /* In && or ||, warn if 2nd operand has no side effect. */
2299 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2301 case COMPOUND_EXPR:
2302 if (TREE_NO_UNUSED_WARNING (exp))
2303 return 0;
2304 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2305 return 1;
2306 /* Let people do `(foo (), 0)' without a warning. */
2307 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2308 return 0;
2309 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2311 case NOP_EXPR:
2312 case CONVERT_EXPR:
2313 case NON_LVALUE_EXPR:
2314 /* Don't warn about conversions not explicit in the user's program. */
2315 if (TREE_NO_UNUSED_WARNING (exp))
2316 return 0;
2317 /* Assignment to a cast usually results in a cast of a modify.
2318 Don't complain about that. There can be an arbitrary number of
2319 casts before the modify, so we must loop until we find the first
2320 non-cast expression and then test to see if that is a modify. */
2322 tree tem = TREE_OPERAND (exp, 0);
2324 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2325 tem = TREE_OPERAND (tem, 0);
2327 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2328 || TREE_CODE (tem) == CALL_EXPR)
2329 return 0;
2331 goto maybe_warn;
2333 case INDIRECT_REF:
2334 /* Don't warn about automatic dereferencing of references, since
2335 the user cannot control it. */
2336 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2337 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2338 /* Fall through. */
2340 default:
2341 /* Referencing a volatile value is a side effect, so don't warn. */
2342 if ((DECL_P (exp)
2343 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2344 && TREE_THIS_VOLATILE (exp))
2345 return 0;
2347 /* If this is an expression which has no operands, there is no value
2348 to be unused. There are no such language-independent codes,
2349 but front ends may define such. */
2350 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2351 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2352 return 0;
2354 maybe_warn:
2355 /* If this is an expression with side effects, don't warn. */
2356 if (TREE_SIDE_EFFECTS (exp))
2357 return 0;
2359 warning ("%Hvalue computed is not used", &emit_locus);
2360 return 1;
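/* By way of example, a statement like "x + 1;" falls through to
   maybe_warn and is diagnosed, whereas "(void) f ();", "p = q;" and the
   constant second operand of "(foo (), 0)" are all deliberately left
   quiet by the cases above.  */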
2364 /* Clear out the memory of the last expression evaluated. */
2366 void
2367 clear_last_expr ()
2369 last_expr_type = NULL_TREE;
2370 last_expr_value = NULL_RTX;
2373 /* Begin a statement-expression, i.e., a series of statements which
2374 may return a value. Return the RTL_EXPR for this statement expr.
2375 The caller must save that value and pass it to
2376 expand_end_stmt_expr. If HAS_SCOPE is nonzero, temporaries created
2377 in the statement-expression are deallocated at the end of the
2378 expression. */
2380 tree
2381 expand_start_stmt_expr (has_scope)
2382 int has_scope;
2384 tree t;
2386 /* Make the RTL_EXPR node temporary, not momentary,
2387 so that rtl_expr_chain doesn't become garbage. */
2388 t = make_node (RTL_EXPR);
2389 do_pending_stack_adjust ();
2390 if (has_scope)
2391 start_sequence_for_rtl_expr (t);
2392 else
2393 start_sequence ();
2394 NO_DEFER_POP;
2395 expr_stmts_for_value++;
2396 return t;
2399 /* Restore the previous state at the end of a statement that returns a value.
2400 Returns a tree node representing the statement's value and the
2401 insns to compute the value.
2403 The nodes of that expression have been freed by now, so we cannot use them.
2404 But we don't want to do that anyway; the expression has already been
2405 evaluated and now we just want to use the value. So generate a RTL_EXPR
2406 with the proper type and RTL value.
2408 If the last substatement was not an expression,
2409 return something with type `void'. */
2411 tree
2412 expand_end_stmt_expr (t)
2413 tree t;
2415 OK_DEFER_POP;
2417 if (! last_expr_value || ! last_expr_type)
2419 last_expr_value = const0_rtx;
2420 last_expr_type = void_type_node;
2422 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2423 /* Remove any possible QUEUED. */
2424 last_expr_value = protect_from_queue (last_expr_value, 0);
2426 emit_queue ();
2428 TREE_TYPE (t) = last_expr_type;
2429 RTL_EXPR_RTL (t) = last_expr_value;
2430 RTL_EXPR_SEQUENCE (t) = get_insns ();
2432 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2434 end_sequence ();
2436 /* Don't consider deleting this expr or containing exprs at tree level. */
2437 TREE_SIDE_EFFECTS (t) = 1;
2438 /* Propagate volatility of the actual RTL expr. */
2439 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2441 clear_last_expr ();
2442 expr_stmts_for_value--;
2444 return t;
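/* Rough usage sketch: for a GNU C statement expression such as
     y = ({ int t = f (x); t * 2; });
   the front end calls expand_start_stmt_expr before expanding the body;
   this function then packages the collected insns and the value of the
   final "t * 2" (recorded in last_expr_value/last_expr_type) into the
   RTL_EXPR handed back to the caller.  */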
2447 /* Generate RTL for the start of an if-then. COND is the expression
2448 whose truth should be tested.
2450 If EXITFLAG is nonzero, this conditional is visible to
2451 `exit_something'. */
2453 void
2454 expand_start_cond (cond, exitflag)
2455 tree cond;
2456 int exitflag;
2458 struct nesting *thiscond = ALLOC_NESTING ();
2460 /* Make an entry on cond_stack for the cond we are entering. */
2462 thiscond->desc = COND_NESTING;
2463 thiscond->next = cond_stack;
2464 thiscond->all = nesting_stack;
2465 thiscond->depth = ++nesting_depth;
2466 thiscond->data.cond.next_label = gen_label_rtx ();
2467 /* Before we encounter an `else', we don't need a separate exit label
2468 unless there are supposed to be exit statements
2469 to exit this conditional. */
2470 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2471 thiscond->data.cond.endif_label = thiscond->exit_label;
2472 cond_stack = thiscond;
2473 nesting_stack = thiscond;
2475 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2478 /* Generate RTL between then-clause and the elseif-clause
2479 of an if-then-elseif-.... */
2481 void
2482 expand_start_elseif (cond)
2483 tree cond;
2485 if (cond_stack->data.cond.endif_label == 0)
2486 cond_stack->data.cond.endif_label = gen_label_rtx ();
2487 emit_jump (cond_stack->data.cond.endif_label);
2488 emit_label (cond_stack->data.cond.next_label);
2489 cond_stack->data.cond.next_label = gen_label_rtx ();
2490 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2493 /* Generate RTL between the then-clause and the else-clause
2494 of an if-then-else. */
2496 void
2497 expand_start_else ()
2499 if (cond_stack->data.cond.endif_label == 0)
2500 cond_stack->data.cond.endif_label = gen_label_rtx ();
2502 emit_jump (cond_stack->data.cond.endif_label);
2503 emit_label (cond_stack->data.cond.next_label);
2504 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2507 /* After calling expand_start_else, turn this "else" into an "else if"
2508 by providing another condition. */
2510 void
2511 expand_elseif (cond)
2512 tree cond;
2514 cond_stack->data.cond.next_label = gen_label_rtx ();
2515 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2518 /* Generate RTL for the end of an if-then.
2519 Pop the record for it off of cond_stack. */
2521 void
2522 expand_end_cond ()
2524 struct nesting *thiscond = cond_stack;
2526 do_pending_stack_adjust ();
2527 if (thiscond->data.cond.next_label)
2528 emit_label (thiscond->data.cond.next_label);
2529 if (thiscond->data.cond.endif_label)
2530 emit_label (thiscond->data.cond.endif_label);
2532 POPSTACK (cond_stack);
2533 clear_last_expr ();
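/* Taken together, these routines emit a skeleton roughly like this for
   "if (a) X; else Y;":
       (jump to next_label if "a" is false)
       ...X...
       (jump to endif_label)
     next_label:
       ...Y...
     endif_label:
   where next_label comes from expand_start_cond and endif_label is
   created lazily by expand_start_else.  */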
2536 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2537 loop should be exited by `exit_something'. This is a loop for which
2538 `expand_continue' will jump to the top of the loop.
2540 Make an entry on loop_stack to record the labels associated with
2541 this loop. */
2543 struct nesting *
2544 expand_start_loop (exit_flag)
2545 int exit_flag;
2547 struct nesting *thisloop = ALLOC_NESTING ();
2549 /* Make an entry on loop_stack for the loop we are entering. */
2551 thisloop->desc = LOOP_NESTING;
2552 thisloop->next = loop_stack;
2553 thisloop->all = nesting_stack;
2554 thisloop->depth = ++nesting_depth;
2555 thisloop->data.loop.start_label = gen_label_rtx ();
2556 thisloop->data.loop.end_label = gen_label_rtx ();
2557 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2558 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2559 loop_stack = thisloop;
2560 nesting_stack = thisloop;
2562 do_pending_stack_adjust ();
2563 emit_queue ();
2564 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2565 emit_label (thisloop->data.loop.start_label);
2567 return thisloop;
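/* Combined with expand_end_loop below, the emitted skeleton for a plain
   "while (c) body;" is roughly
     NOTE_INSN_LOOP_BEG
     start_label:          (also the continue point by default)
       exit test and body
       jump to start_label
     NOTE_INSN_LOOP_END
     end_label:
   before any rotation of the entry test that expand_end_loop performs.  */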
2570 /* Like expand_start_loop but for a loop where the continuation point
2571 (for expand_continue_loop) will be specified explicitly. */
2573 struct nesting *
2574 expand_start_loop_continue_elsewhere (exit_flag)
2575 int exit_flag;
2577 struct nesting *thisloop = expand_start_loop (exit_flag);
2578 loop_stack->data.loop.continue_label = gen_label_rtx ();
2579 return thisloop;
2582 /* Begin a null, aka do { } while (0) "loop". But since the contents
2583 of said loop can still contain a break, we must frob the loop nest. */
2585 struct nesting *
2586 expand_start_null_loop ()
2588 struct nesting *thisloop = ALLOC_NESTING ();
2590 /* Make an entry on loop_stack for the loop we are entering. */
2592 thisloop->desc = LOOP_NESTING;
2593 thisloop->next = loop_stack;
2594 thisloop->all = nesting_stack;
2595 thisloop->depth = ++nesting_depth;
2596 thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
2597 thisloop->data.loop.end_label = gen_label_rtx ();
2598 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2599 thisloop->exit_label = thisloop->data.loop.end_label;
2600 loop_stack = thisloop;
2601 nesting_stack = thisloop;
2603 return thisloop;
2606 /* Specify the continuation point for a loop started with
2607 expand_start_loop_continue_elsewhere.
2608 Use this at the point in the code to which a continue statement
2609 should jump. */
2611 void
2612 expand_loop_continue_here ()
2614 do_pending_stack_adjust ();
2615 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2616 emit_label (loop_stack->data.loop.continue_label);
2619 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2620 Pop the block off of loop_stack. */
2622 void
2623 expand_end_loop ()
2625 rtx start_label = loop_stack->data.loop.start_label;
2626 rtx etc_note;
2627 int eh_regions, debug_blocks;
2628 bool empty_test;
2630 /* Mark the continue-point at the top of the loop if none elsewhere. */
2631 if (start_label == loop_stack->data.loop.continue_label)
2632 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2634 do_pending_stack_adjust ();
2636 /* If the loop starts with a loop exit, roll that to the end where
2637 it will optimize together with the jump back.
2639 If the loop presently looks like this (in pseudo-C):
2641 LOOP_BEG
2642 start_label:
2643 if (test) goto end_label;
2644 LOOP_END_TOP_COND
2645 body;
2646 goto start_label;
2647 end_label:
2649 transform it to look like:
2651 LOOP_BEG
2652 goto start_label;
2653 top_label:
2654 body;
2655 start_label:
2656 if (test) goto end_label;
2657 goto top_label;
2658 end_label:
2660 We rely on the presence of NOTE_INSN_LOOP_END_TOP_COND to mark
2661 the end of the entry conditional. Without this, our lexical scan
2662 can't tell the difference between an entry conditional and a
2663 body conditional that exits the loop. Mistaking the two means
2664 that we can misplace the NOTE_INSN_LOOP_CONT note, which can
2665 screw up loop unrolling.
2667 Things will be oh so much better when loop optimization is done
2668 off of a proper control flow graph... */
2670 /* Scan insns from the top of the loop looking for the END_TOP_COND note. */
2672 empty_test = true;
2673 eh_regions = debug_blocks = 0;
2674 for (etc_note = start_label; etc_note ; etc_note = NEXT_INSN (etc_note))
2675 if (GET_CODE (etc_note) == NOTE)
2677 if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_END_TOP_COND)
2678 break;
2680 /* We must not walk into a nested loop. */
2681 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_BEG)
2683 etc_note = NULL_RTX;
2684 break;
2687 /* At the same time, scan for EH region notes, as we don't want
2688 to scrog region nesting. This shouldn't happen, but... */
2689 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_BEG)
2690 eh_regions++;
2691 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_END)
2693 if (--eh_regions < 0)
2694 /* We've come to the end of an EH region, but never saw the
2695 beginning of that region. That means that an EH region
2696 begins before the top of the loop, and ends in the middle
2697 of it. The existence of such a situation violates a basic
2698 assumption in this code, since that would imply that even
2699 when EH_REGIONS is zero, we might move code out of an
2700 exception region. */
2701 abort ();
2704 /* Likewise for debug scopes. In this case we'll either (1) move
2705 all of the notes if they are properly nested or (2) leave the
2706 notes alone and only rotate the loop at high optimization
2707 levels when we expect to scrog debug info. */
2708 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_BEG)
2709 debug_blocks++;
2710 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_END)
2711 debug_blocks--;
2713 else if (INSN_P (etc_note))
2714 empty_test = false;
2716 if (etc_note
2717 && optimize
2718 && ! empty_test
2719 && eh_regions == 0
2720 && (debug_blocks == 0 || optimize >= 2)
2721 && NEXT_INSN (etc_note) != NULL_RTX
2722 && ! any_condjump_p (get_last_insn ()))
2724 /* We found one. Move everything from START to ETC to the end
2725 of the loop, and add a jump from the top of the loop. */
2726 rtx top_label = gen_label_rtx ();
2727 rtx start_move = start_label;
2729 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2730 then we want to move this note also. */
2731 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2732 && NOTE_LINE_NUMBER (PREV_INSN (start_move)) == NOTE_INSN_LOOP_CONT)
2733 start_move = PREV_INSN (start_move);
2735 emit_label_before (top_label, start_move);
2737 /* Actually move the insns. If the debug scopes are nested, we
2738 can move everything at once. Otherwise we have to move them
2739 one by one and squeeze out the block notes. */
2740 if (debug_blocks == 0)
2741 reorder_insns (start_move, etc_note, get_last_insn ());
2742 else
2744 rtx insn, next_insn;
2745 for (insn = start_move; insn; insn = next_insn)
2747 /* Figure out which insn comes after this one. We have
2748 to do this before we move INSN. */
2749 next_insn = (insn == etc_note ? NULL : NEXT_INSN (insn));
2751 if (GET_CODE (insn) == NOTE
2752 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2753 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2754 continue;
2756 reorder_insns (insn, insn, get_last_insn ());
2760 /* Add the jump from the top of the loop. */
2761 emit_jump_insn_before (gen_jump (start_label), top_label);
2762 emit_barrier_before (top_label);
2763 start_label = top_label;
2766 emit_jump (start_label);
2767 emit_note (NULL, NOTE_INSN_LOOP_END);
2768 emit_label (loop_stack->data.loop.end_label);
2770 POPSTACK (loop_stack);
2772 clear_last_expr ();
2775 /* Finish a null loop, aka do { } while (0). */
2777 void
2778 expand_end_null_loop ()
2780 do_pending_stack_adjust ();
2781 emit_label (loop_stack->data.loop.end_label);
2783 POPSTACK (loop_stack);
2785 clear_last_expr ();
2788 /* Generate a jump to the current loop's continue-point.
2789 This is usually the top of the loop, but may be specified
2790 explicitly elsewhere. If not currently inside a loop,
2791 return 0 and do nothing; caller will print an error message. */
2794 expand_continue_loop (whichloop)
2795 struct nesting *whichloop;
2797 /* Emit information for branch prediction. */
2798 rtx note;
2800 if (flag_guess_branch_prob)
2802 note = emit_note (NULL, NOTE_INSN_PREDICTION);
2803 NOTE_PREDICTION (note) = NOTE_PREDICT (PRED_CONTINUE, IS_TAKEN);
2805 clear_last_expr ();
2806 if (whichloop == 0)
2807 whichloop = loop_stack;
2808 if (whichloop == 0)
2809 return 0;
2810 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2811 NULL_RTX);
2812 return 1;
2815 /* Generate a jump to exit the current loop. If not currently inside a loop,
2816 return 0 and do nothing; caller will print an error message. */
2819 expand_exit_loop (whichloop)
2820 struct nesting *whichloop;
2822 clear_last_expr ();
2823 if (whichloop == 0)
2824 whichloop = loop_stack;
2825 if (whichloop == 0)
2826 return 0;
2827 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2828 return 1;
2831 /* Generate a conditional jump to exit the current loop if COND
2832 evaluates to zero. If not currently inside a loop,
2833 return 0 and do nothing; caller will print an error message. */
2836 expand_exit_loop_if_false (whichloop, cond)
2837 struct nesting *whichloop;
2838 tree cond;
2840 rtx label;
2841 clear_last_expr ();
2843 if (whichloop == 0)
2844 whichloop = loop_stack;
2845 if (whichloop == 0)
2846 return 0;
2848 if (integer_nonzerop (cond))
2849 return 1;
2850 if (integer_zerop (cond))
2851 return expand_exit_loop (whichloop);
2853 /* Check if we definitely won't need a fixup. */
2854 if (whichloop == nesting_stack)
2856 jumpifnot (cond, whichloop->data.loop.end_label);
2857 return 1;
2860 /* In order to handle fixups, we actually create a conditional jump
2861 around an unconditional branch to exit the loop. If fixups are
2862 necessary, they go before the unconditional branch. */
2864 label = gen_label_rtx ();
2865 jumpif (cond, label);
2866 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2867 NULL_RTX);
2868 emit_label (label);
2870 return 1;
2873 /* Like expand_exit_loop_if_false except also emit a note marking
2874 the end of the conditional. Should only be used immediately
2875 after expand_loop_start. */
2878 expand_exit_loop_top_cond (whichloop, cond)
2879 struct nesting *whichloop;
2880 tree cond;
2882 if (! expand_exit_loop_if_false (whichloop, cond))
2883 return 0;
2885 emit_note (NULL, NOTE_INSN_LOOP_END_TOP_COND);
2886 return 1;
2889 /* Return nonzero if we should preserve sub-expressions as separate
2890 pseudos. We never do so if we aren't optimizing. We always do so
2891 if -fexpensive-optimizations.
2893 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2894 the loop may still be a small one. */
2897 preserve_subexpressions_p ()
2899 rtx insn;
2901 if (flag_expensive_optimizations)
2902 return 1;
2904 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2905 return 0;
2907 insn = get_last_insn_anywhere ();
2909 return (insn
2910 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2911 < n_non_fixed_regs * 3));
2915 /* Generate a jump to exit the current loop, conditional, binding contour
2916 or case statement. Not all such constructs are visible to this function,
2917 only those started with EXIT_FLAG nonzero. Individual languages use
2918 the EXIT_FLAG parameter to control which kinds of constructs you can
2919 exit this way.
2921 If not currently inside anything that can be exited,
2922 return 0 and do nothing; caller will print an error message. */
2925 expand_exit_something ()
2927 struct nesting *n;
2928 clear_last_expr ();
2929 for (n = nesting_stack; n; n = n->all)
2930 if (n->exit_label != 0)
2932 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2933 return 1;
2936 return 0;
2939 /* Generate RTL to return from the current function, with no value.
2940 (That is, we do not do anything about returning any value.) */
2942 void
2943 expand_null_return ()
2945 rtx last_insn;
2947 last_insn = get_last_insn ();
2949 /* If this function was declared to return a value, but we
2950 didn't, clobber the return registers so that they are not
2951 propagated live to the rest of the function. */
2952 clobber_return_register ();
2954 expand_null_return_1 (last_insn);
2957 /* Try to guess whether the value of return means error code. */
2958 static enum br_predictor
2959 return_prediction (val)
2960 rtx val;
2962 /* Different heuristics for pointers and scalars. */
2963 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2965 /* NULL is usually not returned. */
2966 if (val == const0_rtx)
2967 return PRED_NULL_RETURN;
2969 else
2971 /* Negative return values are often used to indicate
2972 errors. */
2973 if (GET_CODE (val) == CONST_INT
2974 && INTVAL (val) < 0)
2975 return PRED_NEGATIVE_RETURN;
2976 /* Constant return values are also usually errors;
2977 zero/one often mean booleans, so exclude them from the
2978 heuristics. */
2979 if (CONSTANT_P (val)
2980 && (val != const0_rtx && val != const1_rtx))
2981 return PRED_CONST_RETURN;
2983 return PRED_NO_PREDICTION;
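/* For instance, "return NULL;" from a pointer-valued function yields
   PRED_NULL_RETURN; from an integer-valued function, "return -1;" yields
   PRED_NEGATIVE_RETURN and "return 5;" yields PRED_CONST_RETURN, while
   "return 0;" and "return 1;" get no prediction.  */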
2986 /* Generate RTL to return from the current function, with value VAL. */
2988 static void
2989 expand_value_return (val)
2990 rtx val;
2992 rtx last_insn;
2993 rtx return_reg;
2994 enum br_predictor pred;
2996 if (flag_guess_branch_prob
2997 && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
2999 /* Emit information for branch prediction. */
3000 rtx note;
3002 note = emit_note (NULL, NOTE_INSN_PREDICTION);
3004 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
3008 last_insn = get_last_insn ();
3009 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
3011 /* Copy the value to the return location
3012 unless it's already there. */
3014 if (return_reg != val)
3016 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
3017 #ifdef PROMOTE_FUNCTION_RETURN
3018 int unsignedp = TREE_UNSIGNED (type);
3019 enum machine_mode old_mode
3020 = DECL_MODE (DECL_RESULT (current_function_decl));
3021 enum machine_mode mode
3022 = promote_mode (type, old_mode, &unsignedp, 1);
3024 if (mode != old_mode)
3025 val = convert_modes (mode, old_mode, val, unsignedp);
3026 #endif
3027 if (GET_CODE (return_reg) == PARALLEL)
3028 emit_group_load (return_reg, val, int_size_in_bytes (type));
3029 else
3030 emit_move_insn (return_reg, val);
3033 expand_null_return_1 (last_insn);
3036 /* Output a return with no value. If LAST_INSN is nonzero,
3037 pretend that the return takes place after LAST_INSN. */
3039 static void
3040 expand_null_return_1 (last_insn)
3041 rtx last_insn;
3043 rtx end_label = cleanup_label ? cleanup_label : return_label;
3045 clear_pending_stack_adjust ();
3046 do_pending_stack_adjust ();
3047 clear_last_expr ();
3049 if (end_label == 0)
3050 end_label = return_label = gen_label_rtx ();
3051 expand_goto_internal (NULL_TREE, end_label, last_insn);
3054 /* Generate RTL to evaluate the expression RETVAL and return it
3055 from the current function. */
3057 void
3058 expand_return (retval)
3059 tree retval;
3061 /* If there are any cleanups to be performed, then they will
3062 be inserted following LAST_INSN. It is desirable
3063 that the last_insn, for such purposes, should be the
3064 last insn before computing the return value. Otherwise, cleanups
3065 which call functions can clobber the return value. */
3066 /* ??? rms: I think that is erroneous, because in C++ it would
3067 run destructors on variables that might be used in the subsequent
3068 computation of the return value. */
3069 rtx last_insn = 0;
3070 rtx result_rtl;
3071 rtx val = 0;
3072 tree retval_rhs;
3074 /* If function wants no value, give it none. */
3075 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3077 expand_expr (retval, NULL_RTX, VOIDmode, 0);
3078 emit_queue ();
3079 expand_null_return ();
3080 return;
3083 if (retval == error_mark_node)
3085 /* Treat this like a return of no value from a function that
3086 returns a value. */
3087 expand_null_return ();
3088 return;
3090 else if (TREE_CODE (retval) == RESULT_DECL)
3091 retval_rhs = retval;
3092 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
3093 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3094 retval_rhs = TREE_OPERAND (retval, 1);
3095 else if (VOID_TYPE_P (TREE_TYPE (retval)))
3096 /* Recognize tail-recursive call to void function. */
3097 retval_rhs = retval;
3098 else
3099 retval_rhs = NULL_TREE;
3101 last_insn = get_last_insn ();
3103 /* Distribute return down conditional expr if either of the sides
3104 may involve tail recursion (see test below). This increases the number
3105 of tail recursions we see. Don't do this always since it can produce
3106 sub-optimal code in some cases and we distribute assignments into
3107 conditional expressions when it would help. */
3109 if (optimize && retval_rhs != 0
3110 && frame_offset == 0
3111 && TREE_CODE (retval_rhs) == COND_EXPR
3112 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
3113 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
3115 rtx label = gen_label_rtx ();
3116 tree expr;
3118 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
3119 start_cleanup_deferral ();
3120 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3121 DECL_RESULT (current_function_decl),
3122 TREE_OPERAND (retval_rhs, 1));
3123 TREE_SIDE_EFFECTS (expr) = 1;
3124 expand_return (expr);
3125 emit_label (label);
3127 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3128 DECL_RESULT (current_function_decl),
3129 TREE_OPERAND (retval_rhs, 2));
3130 TREE_SIDE_EFFECTS (expr) = 1;
3131 expand_return (expr);
3132 end_cleanup_deferral ();
3133 return;
3136 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3138 /* If the result is an aggregate that is being returned in one (or more)
3139 registers, load the registers here. The compiler currently can't handle
3140 copying a BLKmode value into registers. We could put this code in a
3141 more general area (for use by everyone instead of just function
3142 call/return), but until this feature is generally usable it is kept here
3143 (and in expand_call). The value must go into a pseudo in case there
3144 are cleanups that will clobber the real return register. */
3146 if (retval_rhs != 0
3147 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3148 && GET_CODE (result_rtl) == REG)
3150 int i;
3151 unsigned HOST_WIDE_INT bitpos, xbitpos;
3152 unsigned HOST_WIDE_INT big_endian_correction = 0;
3153 unsigned HOST_WIDE_INT bytes
3154 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3155 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3156 unsigned int bitsize
3157 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3158 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
3159 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3160 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3161 enum machine_mode tmpmode, result_reg_mode;
3163 if (bytes == 0)
3165 expand_null_return ();
3166 return;
3169 /* Structures whose size is not a multiple of a word are aligned
3170 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
3171 machine, this means we must skip the empty high order bytes when
3172 calculating the bit offset. */
3173 if (BYTES_BIG_ENDIAN
3174 && bytes % UNITS_PER_WORD)
3175 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3176 * BITS_PER_UNIT));
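/* Worked example with illustrative target parameters: for a 6-byte
   structure with 32-bit words (UNITS_PER_WORD == 4) on a big-endian
   machine, bytes % UNITS_PER_WORD == 2, so big_endian_correction is
   32 - 2 * 8 == 16 and the copy below starts at bit 16 of the first
   destination word, skipping the empty high-order bytes.  */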
3178 /* Copy the structure BITSIZE bits at a time. */
3179 for (bitpos = 0, xbitpos = big_endian_correction;
3180 bitpos < bytes * BITS_PER_UNIT;
3181 bitpos += bitsize, xbitpos += bitsize)
3183 /* We need a new destination pseudo each time xbitpos is
3184 on a word boundary and when xbitpos == big_endian_correction
3185 (the first time through). */
3186 if (xbitpos % BITS_PER_WORD == 0
3187 || xbitpos == big_endian_correction)
3189 /* Generate an appropriate register. */
3190 dst = gen_reg_rtx (word_mode);
3191 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3193 /* Clear the destination before we move anything into it. */
3194 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
3197 /* We need a new source operand each time bitpos is on a word
3198 boundary. */
3199 if (bitpos % BITS_PER_WORD == 0)
3200 src = operand_subword_force (result_val,
3201 bitpos / BITS_PER_WORD,
3202 BLKmode);
3204 /* Use bitpos for the source extraction (left justified) and
3205 xbitpos for the destination store (right justified). */
3206 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3207 extract_bit_field (src, bitsize,
3208 bitpos % BITS_PER_WORD, 1,
3209 NULL_RTX, word_mode, word_mode,
3210 BITS_PER_WORD),
3211 BITS_PER_WORD);
3214 /* Find the smallest integer mode large enough to hold the
3215 entire structure and use that mode instead of BLKmode
3216 on the USE insn for the return register. */
3217 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3218 tmpmode != VOIDmode;
3219 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3220 /* Have we found a large enough mode? */
3221 if (GET_MODE_SIZE (tmpmode) >= bytes)
3222 break;
3224 /* No suitable mode found. */
3225 if (tmpmode == VOIDmode)
3226 abort ();
3228 PUT_MODE (result_rtl, tmpmode);
3230 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3231 result_reg_mode = word_mode;
3232 else
3233 result_reg_mode = tmpmode;
3234 result_reg = gen_reg_rtx (result_reg_mode);
3236 emit_queue ();
3237 for (i = 0; i < n_regs; i++)
3238 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3239 result_pseudos[i]);
3241 if (tmpmode != result_reg_mode)
3242 result_reg = gen_lowpart (tmpmode, result_reg);
3244 expand_value_return (result_reg);
3246 else if (retval_rhs != 0
3247 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3248 && (GET_CODE (result_rtl) == REG
3249 || (GET_CODE (result_rtl) == PARALLEL)))
3251 /* Calculate the return value into a temporary (usually a pseudo
3252 reg). */
3253 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3254 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3256 val = assign_temp (nt, 0, 0, 1);
3257 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3258 val = force_not_mem (val);
3259 emit_queue ();
3260 /* Return the calculated value, doing cleanups first. */
3261 expand_value_return (val);
3263 else
3265 /* No cleanups or no hard reg used;
3266 calculate value into hard return reg. */
3267 expand_expr (retval, const0_rtx, VOIDmode, 0);
3268 emit_queue ();
3269 expand_value_return (result_rtl);
3273 /* Attempt to optimize a potential tail recursion call into a goto.
3274 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3275 where to place the jump to the tail recursion label.
3277 Return TRUE if the call was optimized into a goto. */
3280 optimize_tail_recursion (arguments, last_insn)
3281 tree arguments;
3282 rtx last_insn;
3284 /* Finish checking validity, and if valid emit code to set the
3285 argument variables for the new call. */
3286 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3288 if (tail_recursion_label == 0)
3290 tail_recursion_label = gen_label_rtx ();
3291 emit_label_after (tail_recursion_label,
3292 tail_recursion_reentry);
3294 emit_queue ();
3295 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3296 emit_barrier ();
3297 return 1;
3299 return 0;
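/* Sketch of the effect (names illustrative): for a tail call such as
     return f (n - 1, acc * n);
   tail_recursion_args below stores "n - 1" and "acc * n" into the
   formal parameters' registers, copying an actual to a fresh register
   first if it still reads an earlier formal, and the code above then
   jumps back to tail_recursion_label instead of emitting a real call.  */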
3302 /* Emit code to alter this function's formal parms for a tail-recursive call.
3303 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3304 FORMALS is the chain of decls of formals.
3305 Return 1 if this can be done;
3306 otherwise return 0 and do not emit any code. */
3308 static int
3309 tail_recursion_args (actuals, formals)
3310 tree actuals, formals;
3312 tree a = actuals, f = formals;
3313 int i;
3314 rtx *argvec;
3316 /* Check that number and types of actuals are compatible
3317 with the formals. This is not always true in valid C code.
3318 Also check that no formal needs to be addressable
3319 and that all formals are scalars. */
3321 /* Also count the args. */
3323 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3325 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3326 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3327 return 0;
3328 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3329 return 0;
3331 if (a != 0 || f != 0)
3332 return 0;
3334 /* Compute all the actuals. */
3336 argvec = (rtx *) alloca (i * sizeof (rtx));
3338 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3339 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3341 /* Find which actual values refer to current values of previous formals.
3342 Copy each of them now, before any formal is changed. */
3344 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3346 int copy = 0;
3347 int j;
3348 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3349 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3351 copy = 1;
3352 break;
3354 if (copy)
3355 argvec[i] = copy_to_reg (argvec[i]);
3358 /* Store the values of the actuals into the formals. */
3360 for (f = formals, a = actuals, i = 0; f;
3361 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3363 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3364 emit_move_insn (DECL_RTL (f), argvec[i]);
3365 else
3367 rtx tmp = argvec[i];
3368 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a)));
3369 promote_mode(TREE_TYPE (TREE_VALUE (a)), GET_MODE (tmp),
3370 &unsignedp, 0);
3371 if (DECL_MODE (f) != GET_MODE (DECL_RTL (f)))
3373 tmp = gen_reg_rtx (DECL_MODE (f));
3374 convert_move (tmp, argvec[i], unsignedp);
3376 convert_move (DECL_RTL (f), tmp, unsignedp);
3380 free_temp_slots ();
3381 return 1;
3384 /* Generate the RTL code for entering a binding contour.
3385 The variables are declared one by one, by calls to `expand_decl'.
3387 FLAGS is a bitwise or of the following flags:
3389 1 - Nonzero if this construct should be visible to
3390 `exit_something'.
3392 2 - Nonzero if this contour does not require a
3393 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3394 language-independent code should set this flag because they
3395 will not create corresponding BLOCK nodes. (There should be
3396 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3397 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3398 when expand_end_bindings is called.
3400 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3401 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3402 note. */
3404 void
3405 expand_start_bindings_and_block (flags, block)
3406 int flags;
3407 tree block;
3409 struct nesting *thisblock = ALLOC_NESTING ();
3410 rtx note;
3411 int exit_flag = ((flags & 1) != 0);
3412 int block_flag = ((flags & 2) == 0);
3414 /* If a BLOCK is supplied, then the caller should be requesting a
3415 NOTE_INSN_BLOCK_BEG note. */
3416 if (!block_flag && block)
3417 abort ();
3419 /* Create a note to mark the beginning of the block. */
3420 if (block_flag)
3422 note = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
3423 NOTE_BLOCK (note) = block;
3425 else
3426 note = emit_note (NULL, NOTE_INSN_DELETED);
3428 /* Make an entry on block_stack for the block we are entering. */
3430 thisblock->desc = BLOCK_NESTING;
3431 thisblock->next = block_stack;
3432 thisblock->all = nesting_stack;
3433 thisblock->depth = ++nesting_depth;
3434 thisblock->data.block.stack_level = 0;
3435 thisblock->data.block.cleanups = 0;
3436 thisblock->data.block.exception_region = 0;
3437 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3439 thisblock->data.block.conditional_code = 0;
3440 thisblock->data.block.last_unconditional_cleanup = note;
3441 /* When we insert instructions after the last unconditional cleanup,
3442 we don't adjust last_insn. That means that a later add_insn will
3443 clobber the instructions we've just added. The easiest way to
3444 fix this is to just insert another instruction here, so that the
3445 instructions inserted after the last unconditional cleanup are
3446 never the last instruction. */
3447 emit_note (NULL, NOTE_INSN_DELETED);
3449 if (block_stack
3450 && !(block_stack->data.block.cleanups == NULL_TREE
3451 && block_stack->data.block.outer_cleanups == NULL_TREE))
3452 thisblock->data.block.outer_cleanups
3453 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3454 block_stack->data.block.outer_cleanups);
3455 else
3456 thisblock->data.block.outer_cleanups = 0;
3457 thisblock->data.block.label_chain = 0;
3458 thisblock->data.block.innermost_stack_block = stack_block_stack;
3459 thisblock->data.block.first_insn = note;
3460 thisblock->data.block.block_start_count = ++current_block_start_count;
3461 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3462 block_stack = thisblock;
3463 nesting_stack = thisblock;
3465 /* Make a new level for allocating stack slots. */
3466 push_temp_slots ();
3469 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3470 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3471 expand_expr is made. After we end the region, we know that all
3472 space for all temporaries that were created by TARGET_EXPRs will be
3473 destroyed and their space freed for reuse. */
3475 void
3476 expand_start_target_temps ()
3478 /* This is so that even if the result is preserved, the space
3479 allocated will be freed, as we know that it is no longer in use. */
3480 push_temp_slots ();
3482 /* Start a new binding layer that will keep track of all cleanup
3483 actions to be performed. */
3484 expand_start_bindings (2);
3486 target_temp_slot_level = temp_slot_level;
3489 void
3490 expand_end_target_temps ()
3492 expand_end_bindings (NULL_TREE, 0, 0);
3494 /* This is so that even if the result is preserved, the space
3495 allocated will be freed, as we know that it is no longer in use. */
3496 pop_temp_slots ();
3499 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
3500 in question represents the outermost pair of curly braces (i.e. the "body
3501 block") of a function or method.
3503 For any BLOCK node representing a "body block" of a function or method, the
3504 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3505 represents the outermost (function) scope for the function or method (i.e.
3506 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3507 *that* node in turn will point to the relevant FUNCTION_DECL node. */
3510 is_body_block (stmt)
3511 tree stmt;
3513 if (TREE_CODE (stmt) == BLOCK)
3515 tree parent = BLOCK_SUPERCONTEXT (stmt);
3517 if (parent && TREE_CODE (parent) == BLOCK)
3519 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3521 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3522 return 1;
3526 return 0;
3529 /* True if we are currently emitting insns in an area of output code
3530 that is controlled by a conditional expression. This is used by
3531 the cleanup handling code to generate conditional cleanup actions. */
3534 conditional_context ()
3536 return block_stack && block_stack->data.block.conditional_code;
3539 /* Return an opaque pointer to the current nesting level, so frontend code
3540 can check its own sanity. */
3542 struct nesting *
3543 current_nesting_level ()
3545 return cfun ? block_stack : 0;
3548 /* Emit a handler label for a nonlocal goto handler.
3549 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3551 static rtx
3552 expand_nl_handler_label (slot, before_insn)
3553 rtx slot, before_insn;
3555 rtx insns;
3556 rtx handler_label = gen_label_rtx ();
3558 /* Don't let cleanup_cfg delete the handler. */
3559 LABEL_PRESERVE_P (handler_label) = 1;
3561 start_sequence ();
3562 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3563 insns = get_insns ();
3564 end_sequence ();
3565 emit_insn_before (insns, before_insn);
3567 emit_label (handler_label);
3569 return handler_label;
3572 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3573 handler. */
3574 static void
3575 expand_nl_goto_receiver ()
3577 #ifdef HAVE_nonlocal_goto
3578 if (! HAVE_nonlocal_goto)
3579 #endif
3580 /* First adjust our frame pointer to its actual value. It was
3581 previously set to the start of the virtual area corresponding to
3582 the stacked variables when we branched here and now needs to be
3583 adjusted to the actual hardware fp value.
3585 Assignments to virtual registers are converted by
3586 instantiate_virtual_regs into the corresponding assignment
3587 to the underlying register (fp in this case) that makes
3588 the original assignment true.
3589 So the following insn will actually be
3590 decrementing fp by STARTING_FRAME_OFFSET. */
3591 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3593 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3594 if (fixed_regs[ARG_POINTER_REGNUM])
3596 #ifdef ELIMINABLE_REGS
3597 /* If the argument pointer can be eliminated in favor of the
3598 frame pointer, we don't need to restore it. We assume here
3599 that if such an elimination is present, it can always be used.
3600 This is the case on all known machines; if we don't make this
3601 assumption, we do unnecessary saving on many machines. */
3602 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3603 size_t i;
3605 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3606 if (elim_regs[i].from == ARG_POINTER_REGNUM
3607 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3608 break;
3610 if (i == ARRAY_SIZE (elim_regs))
3611 #endif
3613 /* Now restore our arg pointer from the address at which it
3614 was saved in our stack frame. */
3615 emit_move_insn (virtual_incoming_args_rtx,
3616 copy_to_reg (get_arg_pointer_save_area (cfun)));
3619 #endif
3621 #ifdef HAVE_nonlocal_goto_receiver
3622 if (HAVE_nonlocal_goto_receiver)
3623 emit_insn (gen_nonlocal_goto_receiver ());
3624 #endif
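/* Illustrative sketch (editorial note, not part of the original source):
   the receivers built by expand_nl_goto_receivers below handle GNU C
   nonlocal gotos out of nested functions, for example

       void f (void)
       {
         __label__ done;
         void g (void) { goto done; }
         callee (g);
       done: ;
       }

   When g executes `goto done', control re-enters f through one of the
   handler labels recorded in nonlocal_goto_handler_slots, and the
   receiver code emitted above first restores the frame pointer (and,
   when necessary, the arg pointer).  */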
3627 /* Make handlers for nonlocal gotos taking place in the function calls in
3628 block THISBLOCK. */
3630 static void
3631 expand_nl_goto_receivers (thisblock)
3632 struct nesting *thisblock;
3634 tree link;
3635 rtx afterward = gen_label_rtx ();
3636 rtx insns, slot;
3637 rtx label_list;
3638 int any_invalid;
3640 /* Record the handler address in the stack slot for that purpose,
3641 during this block, saving and restoring the outer value. */
3642 if (thisblock->next != 0)
3643 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3645 rtx save_receiver = gen_reg_rtx (Pmode);
3646 emit_move_insn (XEXP (slot, 0), save_receiver);
3648 start_sequence ();
3649 emit_move_insn (save_receiver, XEXP (slot, 0));
3650 insns = get_insns ();
3651 end_sequence ();
3652 emit_insn_before (insns, thisblock->data.block.first_insn);
3655 /* Jump around the handlers; they run only when specially invoked. */
3656 emit_jump (afterward);
3658 /* Make a separate handler for each label. */
3659 link = nonlocal_labels;
3660 slot = nonlocal_goto_handler_slots;
3661 label_list = NULL_RTX;
3662 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3663 /* Skip any labels we shouldn't be able to jump to from here;
3664 we generate one special handler below for all of them, which
3665 simply traps. */
3666 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3668 rtx lab;
3669 lab = expand_nl_handler_label (XEXP (slot, 0),
3670 thisblock->data.block.first_insn);
3671 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3673 expand_nl_goto_receiver ();
3675 /* Jump to the "real" nonlocal label. */
3676 expand_goto (TREE_VALUE (link));
3679 /* A second pass over all nonlocal labels; this time we handle those
3680 we should not be able to jump to at this point. */
3681 link = nonlocal_labels;
3682 slot = nonlocal_goto_handler_slots;
3683 any_invalid = 0;
3684 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3685 if (DECL_TOO_LATE (TREE_VALUE (link)))
3687 rtx lab;
3688 lab = expand_nl_handler_label (XEXP (slot, 0),
3689 thisblock->data.block.first_insn);
3690 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3691 any_invalid = 1;
3694 if (any_invalid)
3696 expand_nl_goto_receiver ();
3697 expand_builtin_trap ();
3700 nonlocal_goto_handler_labels = label_list;
3701 emit_label (afterward);
3704 /* Warn about any unused VARS (which may contain nodes other than
3705 VAR_DECLs, but such nodes are ignored). The nodes are connected
3706 via the TREE_CHAIN field. */
3708 void
3709 warn_about_unused_variables (vars)
3710 tree vars;
3712 tree decl;
3714 if (warn_unused_variable)
3715 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3716 if (TREE_CODE (decl) == VAR_DECL
3717 && ! TREE_USED (decl)
3718 && ! DECL_IN_SYSTEM_HEADER (decl)
3719 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3720 warning_with_decl (decl, "unused variable `%s'");
3723 /* Generate RTL code to terminate a binding contour.
3725 VARS is the chain of VAR_DECL nodes for the variables bound in this
3726 contour. There may actually be other nodes in this chain, but any
3727 nodes other than VAR_DECLS are ignored.
3729 MARK_ENDS is nonzero if we should put a note at the beginning
3730 and end of this binding contour.
3732 DONT_JUMP_IN is positive if it is not valid to jump into this contour,
3733 zero if we can jump into this contour only if it does not have a saved
3734 stack level, and negative if we are not to check for invalid use of
3735 labels (because the front end does that). */
3737 void
3738 expand_end_bindings (vars, mark_ends, dont_jump_in)
3739 tree vars;
3740 int mark_ends;
3741 int dont_jump_in;
3743 struct nesting *thisblock = block_stack;
3745 /* If any of the variables in this scope were not used, warn the
3746 user. */
3747 warn_about_unused_variables (vars);
3749 if (thisblock->exit_label)
3751 do_pending_stack_adjust ();
3752 emit_label (thisblock->exit_label);
3755 /* If necessary, make handlers for nonlocal gotos taking
3756 place in the function calls in this block. */
3757 if (function_call_count != 0 && nonlocal_labels
3758 /* Make handler for outermost block
3759 if there were any nonlocal gotos to this function. */
3760 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3761 /* Make handler for inner block if it has something
3762 special to do when you jump out of it. */
3763 : (thisblock->data.block.cleanups != 0
3764 || thisblock->data.block.stack_level != 0)))
3765 expand_nl_goto_receivers (thisblock);
3767 /* Don't allow jumping into a block that has a stack level.
3768 Cleanups are allowed, though. */
3769 if (dont_jump_in > 0
3770 || (dont_jump_in == 0 && thisblock->data.block.stack_level != 0))
3772 struct label_chain *chain;
3774 /* Any labels in this block are no longer valid to go to.
3775 Mark them to cause an error message. */
3776 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3778 DECL_TOO_LATE (chain->label) = 1;
3779 /* If any goto without a fixup came to this label,
3780 that must be an error, because gotos without fixups
3781 come from outside all saved stack-levels. */
3782 if (TREE_ADDRESSABLE (chain->label))
3783 error_with_decl (chain->label,
3784 "label `%s' used before containing binding contour");
3788 /* Restore stack level in effect before the block
3789 (only if variable-size objects allocated). */
3790 /* Perform any cleanups associated with the block. */
3792 if (thisblock->data.block.stack_level != 0
3793 || thisblock->data.block.cleanups != 0)
3795 int reachable;
3796 rtx insn;
3798 /* Don't let cleanups affect ({...}) constructs. */
3799 int old_expr_stmts_for_value = expr_stmts_for_value;
3800 rtx old_last_expr_value = last_expr_value;
3801 tree old_last_expr_type = last_expr_type;
3802 expr_stmts_for_value = 0;
3804 /* Only clean up here if this point can actually be reached. */
3805 insn = get_last_insn ();
3806 if (GET_CODE (insn) == NOTE)
3807 insn = prev_nonnote_insn (insn);
3808 reachable = (! insn || GET_CODE (insn) != BARRIER);
3810 /* Do the cleanups. */
3811 expand_cleanups (thisblock->data.block.cleanups, 0, reachable);
3812 if (reachable)
3813 do_pending_stack_adjust ();
3815 expr_stmts_for_value = old_expr_stmts_for_value;
3816 last_expr_value = old_last_expr_value;
3817 last_expr_type = old_last_expr_type;
3819 /* Restore the stack level. */
3821 if (reachable && thisblock->data.block.stack_level != 0)
3823 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3824 thisblock->data.block.stack_level, NULL_RTX);
3825 if (nonlocal_goto_handler_slots != 0)
3826 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3827 NULL_RTX);
3830 /* Any gotos out of this block must also do these things.
3831 Also report any gotos with fixups that came to labels in this
3832 level. */
3833 fixup_gotos (thisblock,
3834 thisblock->data.block.stack_level,
3835 thisblock->data.block.cleanups,
3836 thisblock->data.block.first_insn,
3837 dont_jump_in);
3840 /* Mark the beginning and end of the scope if requested.
3841 We do this now, after running cleanups on the variables
3842 just going out of scope, so they are in scope for their cleanups. */
3844 if (mark_ends)
3846 rtx note = emit_note (NULL, NOTE_INSN_BLOCK_END);
3847 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3849 else
3850 /* Get rid of the beginning-mark if we don't make an end-mark. */
3851 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3853 /* Restore the temporary level of TARGET_EXPRs. */
3854 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3856 /* Restore block_stack level for containing block. */
3858 stack_block_stack = thisblock->data.block.innermost_stack_block;
3859 POPSTACK (block_stack);
3861 /* Pop the stack slot nesting and free any slots at this level. */
3862 pop_temp_slots ();
3865 /* Generate code to save the stack pointer at the start of the current block
3866 and set up to restore it on exit. */
3868 void
3869 save_stack_pointer ()
3871 struct nesting *thisblock = block_stack;
3873 if (thisblock->data.block.stack_level == 0)
3875 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3876 &thisblock->data.block.stack_level,
3877 thisblock->data.block.first_insn);
3878 stack_block_stack = thisblock;
3882 /* Generate RTL for the automatic variable declaration DECL.
3883 (Other kinds of declarations are simply ignored if seen here.) */
3885 void
3886 expand_decl (decl)
3887 tree decl;
3889 tree type;
3891 type = TREE_TYPE (decl);
3893 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3894 type in case this node is used in a reference. */
3895 if (TREE_CODE (decl) == CONST_DECL)
3897 DECL_MODE (decl) = TYPE_MODE (type);
3898 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3899 DECL_SIZE (decl) = TYPE_SIZE (type);
3900 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3901 return;
3904 /* Otherwise, only automatic variables need any expansion done. Static and
3905 external variables, and external functions, will be handled by
3906 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3907 nothing. PARM_DECLs are handled in `assign_parms'. */
3908 if (TREE_CODE (decl) != VAR_DECL)
3909 return;
3911 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3912 return;
3914 /* Create the RTL representation for the variable. */
3916 if (type == error_mark_node)
3917 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3919 else if (DECL_SIZE (decl) == 0)
3920 /* Variable with incomplete type. */
3922 rtx x;
3923 if (DECL_INITIAL (decl) == 0)
3924 /* Error message was already done; now avoid a crash. */
3925 x = gen_rtx_MEM (BLKmode, const0_rtx);
3926 else
3927 /* An initializer is going to decide the size of this array.
3928 Until we know the size, represent its address with a reg. */
3929 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3931 set_mem_attributes (x, decl, 1);
3932 SET_DECL_RTL (decl, x);
3934 else if (DECL_MODE (decl) != BLKmode
3935 /* If -ffloat-store, don't put explicit float vars
3936 into regs. */
3937 && !(flag_float_store
3938 && TREE_CODE (type) == REAL_TYPE)
3939 && ! TREE_THIS_VOLATILE (decl)
3940 && ! DECL_NONLOCAL (decl)
3941 && (DECL_REGISTER (decl) || DECL_ARTIFICIAL (decl) || optimize))
3943 /* Automatic variable that can go in a register. */
3944 int unsignedp = TREE_UNSIGNED (type);
3945 enum machine_mode reg_mode
3946 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3948 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3950 if (!DECL_ARTIFICIAL (decl))
3951 mark_user_reg (DECL_RTL (decl));
3953 if (POINTER_TYPE_P (type))
3954 mark_reg_pointer (DECL_RTL (decl),
3955 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3957 maybe_set_unchanging (DECL_RTL (decl), decl);
3959 /* If something wants our address, try to use ADDRESSOF. */
3960 if (TREE_ADDRESSABLE (decl))
3961 put_var_into_stack (decl, /*rescan=*/false);
3964 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3965 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3966 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3967 STACK_CHECK_MAX_VAR_SIZE)))
3969 /* Variable of fixed size that goes on the stack. */
3970 rtx oldaddr = 0;
3971 rtx addr;
3972 rtx x;
3974 /* If we previously made RTL for this decl, it must be an array
3975 whose size was determined by the initializer.
3976 The old address was a register; set that register now
3977 to the proper address. */
3978 if (DECL_RTL_SET_P (decl))
3980 if (GET_CODE (DECL_RTL (decl)) != MEM
3981 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3982 abort ();
3983 oldaddr = XEXP (DECL_RTL (decl), 0);
3986 /* Set alignment we actually gave this decl. */
3987 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3988 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3989 DECL_USER_ALIGN (decl) = 0;
3991 x = assign_temp (decl, 1, 1, 1);
3992 set_mem_attributes (x, decl, 1);
3993 SET_DECL_RTL (decl, x);
3995 if (oldaddr)
3997 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3998 if (addr != oldaddr)
3999 emit_move_insn (oldaddr, addr);
4002 else
4003 /* Dynamic-size object: must push space on the stack. */
4005 rtx address, size, x;
4007 /* Record the stack pointer on entry to this block, if we have
4008 not already done so. */
4009 do_pending_stack_adjust ();
4010 save_stack_pointer ();
4012 /* In function-at-a-time mode, variable_size doesn't expand this,
4013 so do it now. */
4014 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
4015 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
4016 const0_rtx, VOIDmode, 0);
4018 /* Compute the variable's size, in bytes. */
4019 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
4020 free_temp_slots ();
4022 /* Allocate space on the stack for the variable. Note that
4023 DECL_ALIGN says how the variable is to be aligned and we
4024 cannot use it to conclude anything about the alignment of
4025 the size. */
4026 address = allocate_dynamic_stack_space (size, NULL_RTX,
4027 TYPE_ALIGN (TREE_TYPE (decl)));
4029 /* Reference the variable indirect through that rtx. */
4030 x = gen_rtx_MEM (DECL_MODE (decl), address);
4031 set_mem_attributes (x, decl, 1);
4032 SET_DECL_RTL (decl, x);
4035 /* Indicate the alignment we actually gave this variable. */
4036 #ifdef STACK_BOUNDARY
4037 DECL_ALIGN (decl) = STACK_BOUNDARY;
4038 #else
4039 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
4040 #endif
4041 DECL_USER_ALIGN (decl) = 0;
4045 /* Emit code to perform the initialization of a declaration DECL. */
4047 void
4048 expand_decl_init (decl)
4049 tree decl;
4051 int was_used = TREE_USED (decl);
4053 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
4054 for static decls. */
4055 if (TREE_CODE (decl) == CONST_DECL
4056 || TREE_STATIC (decl))
4057 return;
4059 /* Compute and store the initial value now. */
4061 push_temp_slots ();
4063 if (DECL_INITIAL (decl) == error_mark_node)
4065 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
4067 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
4068 || code == POINTER_TYPE || code == REFERENCE_TYPE)
4069 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
4070 0, 0);
4071 emit_queue ();
4073 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
4075 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
4076 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
4077 emit_queue ();
4080 /* Don't let the initialization count as "using" the variable. */
4081 TREE_USED (decl) = was_used;
4083 /* Free any temporaries we made while initializing the decl. */
4084 preserve_temp_slots (NULL_RTX);
4085 free_temp_slots ();
4086 pop_temp_slots ();
4089 /* CLEANUP is an expression to be executed at exit from this binding contour;
4090 for example, in C++, it might call the destructor for this variable.
4092 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4093 CLEANUP multiple times, and have the correct semantics. This
4094 happens in exception handling, for gotos, returns, breaks that
4095 leave the current scope.
4097 If CLEANUP is nonzero and DECL is zero, we record a cleanup
4098 that is not associated with any particular variable. */
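/* Illustrative use (editorial sketch; the decl and cleanup trees shown
   are hypothetical):  a C++ front end expanding  { String s; ... }
   might emit

       expand_decl (s_decl);
       expand_decl_init (s_decl);
       expand_decl_cleanup (s_decl, destructor_call_expr);

   so that the destructor runs on every exit from the binding contour,
   whether by falling off the end, goto, return or exception.  */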
4101 expand_decl_cleanup (decl, cleanup)
4102 tree decl, cleanup;
4104 struct nesting *thisblock;
4106 /* Error if we are not in any block. */
4107 if (cfun == 0 || block_stack == 0)
4108 return 0;
4110 thisblock = block_stack;
4112 /* Record the cleanup if there is one. */
4114 if (cleanup != 0)
4116 tree t;
4117 rtx seq;
4118 tree *cleanups = &thisblock->data.block.cleanups;
4119 int cond_context = conditional_context ();
4121 if (cond_context)
4123 rtx flag = gen_reg_rtx (word_mode);
4124 rtx set_flag_0;
4125 tree cond;
4127 start_sequence ();
4128 emit_move_insn (flag, const0_rtx);
4129 set_flag_0 = get_insns ();
4130 end_sequence ();
4132 thisblock->data.block.last_unconditional_cleanup
4133 = emit_insn_after (set_flag_0,
4134 thisblock->data.block.last_unconditional_cleanup);
4136 emit_move_insn (flag, const1_rtx);
4138 cond = build_decl (VAR_DECL, NULL_TREE,
4139 (*lang_hooks.types.type_for_mode) (word_mode, 1));
4140 SET_DECL_RTL (cond, flag);
4142 /* Conditionalize the cleanup. */
4143 cleanup = build (COND_EXPR, void_type_node,
4144 (*lang_hooks.truthvalue_conversion) (cond),
4145 cleanup, integer_zero_node);
4146 cleanup = fold (cleanup);
4148 cleanups = &thisblock->data.block.cleanups;
4151 cleanup = unsave_expr (cleanup);
4153 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4155 if (! cond_context)
4156 /* If this block has a cleanup, it belongs in stack_block_stack. */
4157 stack_block_stack = thisblock;
4159 if (cond_context)
4161 start_sequence ();
4164 if (! using_eh_for_cleanups_p)
4165 TREE_ADDRESSABLE (t) = 1;
4166 else
4167 expand_eh_region_start ();
4169 if (cond_context)
4171 seq = get_insns ();
4172 end_sequence ();
4173 if (seq)
4174 thisblock->data.block.last_unconditional_cleanup
4175 = emit_insn_after (seq,
4176 thisblock->data.block.last_unconditional_cleanup);
4178 else
4180 thisblock->data.block.last_unconditional_cleanup
4181 = get_last_insn ();
4182 /* When we insert instructions after the last unconditional cleanup,
4183 we don't adjust last_insn. That means that a later add_insn will
4184 clobber the instructions we've just added. The easiest way to
4185 fix this is to just insert another instruction here, so that the
4186 instructions inserted after the last unconditional cleanup are
4187 never the last instruction. */
4188 emit_note (NULL, NOTE_INSN_DELETED);
4191 return 1;
4194 /* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
4195 is thrown. */
4198 expand_decl_cleanup_eh (decl, cleanup, eh_only)
4199 tree decl, cleanup;
4200 int eh_only;
4202 int ret = expand_decl_cleanup (decl, cleanup);
4203 if (cleanup && ret)
4205 tree node = block_stack->data.block.cleanups;
4206 CLEANUP_EH_ONLY (node) = eh_only;
4208 return ret;
4211 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4212 DECL_ELTS is the list of elements that belong to DECL's type.
4213 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4215 void
4216 expand_anon_union_decl (decl, cleanup, decl_elts)
4217 tree decl, cleanup, decl_elts;
4219 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4220 rtx x;
4221 tree t;
4223 /* If any of the elements are addressable, so is the entire union. */
4224 for (t = decl_elts; t; t = TREE_CHAIN (t))
4225 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4227 TREE_ADDRESSABLE (decl) = 1;
4228 break;
4231 expand_decl (decl);
4232 expand_decl_cleanup (decl, cleanup);
4233 x = DECL_RTL (decl);
4235 /* Go through the elements, assigning RTL to each. */
4236 for (t = decl_elts; t; t = TREE_CHAIN (t))
4238 tree decl_elt = TREE_VALUE (t);
4239 tree cleanup_elt = TREE_PURPOSE (t);
4240 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4242 /* If any of the elements are used, so is the entire
4243 union. */
4244 if (TREE_USED (decl_elt))
4245 TREE_USED (decl) = 1;
4247 /* Propagate the union's alignment to the elements. */
4248 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4249 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4251 /* If the element has BLKmode and the union doesn't, the union is
4252 aligned such that the element doesn't need to have BLKmode, so
4253 change the element's mode to the appropriate one for its size. */
4254 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4255 DECL_MODE (decl_elt) = mode
4256 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4258 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4259 instead create a new MEM rtx with the proper mode. */
4260 if (GET_CODE (x) == MEM)
4262 if (mode == GET_MODE (x))
4263 SET_DECL_RTL (decl_elt, x);
4264 else
4265 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
4267 else if (GET_CODE (x) == REG)
4269 if (mode == GET_MODE (x))
4270 SET_DECL_RTL (decl_elt, x);
4271 else
4272 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
4274 else
4275 abort ();
4277 /* Record the cleanup if there is one. */
4279 if (cleanup != 0)
4280 thisblock->data.block.cleanups
4281 = tree_cons (decl_elt, cleanup_elt,
4282 thisblock->data.block.cleanups);
4286 /* Expand a list of cleanups LIST.
4287 Elements may be expressions or may be nested lists.
4289 If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
4290 goto and handle protection regions specially in that case.
4292 If REACHABLE, we emit code, otherwise just inform the exception handling
4293 code about this finalization. */
4295 static void
4296 expand_cleanups (list, in_fixup, reachable)
4297 tree list;
4298 int in_fixup;
4299 int reachable;
4301 tree tail;
4302 for (tail = list; tail; tail = TREE_CHAIN (tail))
4303 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4304 expand_cleanups (TREE_VALUE (tail), in_fixup, reachable);
4305 else
4307 if (! in_fixup && using_eh_for_cleanups_p)
4308 expand_eh_region_end_cleanup (TREE_VALUE (tail));
4310 if (reachable && !CLEANUP_EH_ONLY (tail))
4312 /* Cleanups may be run multiple times. For example,
4313 when exiting a binding contour, we expand the
4314 cleanups associated with that contour. When a goto
4315 within that binding contour has a target outside that
4316 contour, it will expand all cleanups from its scope to
4317 the target. Though the cleanups are expanded multiple
4318 times, the control paths are non-overlapping so the
4319 cleanups will not be executed twice. */
4321 /* We may need to protect from outer cleanups. */
4322 if (in_fixup && using_eh_for_cleanups_p)
4324 expand_eh_region_start ();
4326 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4328 expand_eh_region_end_fixup (TREE_VALUE (tail));
4330 else
4331 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4333 free_temp_slots ();
4338 /* Mark the context we are emitting RTL for as a conditional
4339 context, so that any cleanup actions we register with
4340 expand_decl_cleanup will be properly conditionalized when those
4341 cleanup actions are later performed. Must be called before any
4342 expression (tree) is expanded that is within a conditional context. */
4344 void
4345 start_cleanup_deferral ()
4347 /* block_stack can be NULL if we are inside the parameter list. It is
4348 OK to do nothing, because cleanups aren't possible here. */
4349 if (block_stack)
4350 ++block_stack->data.block.conditional_code;
4353 /* Mark the end of a conditional region of code. Because cleanup
4354 deferrals may be nested, we may still be in a conditional region
4355 after we end the currently deferred cleanups; only after we end all
4356 deferred cleanups are we back in unconditional code. */
4358 void
4359 end_cleanup_deferral ()
4361 /* block_stack can be NULL if we are inside the parameter list. It is
4362 OK to do nothing, because cleanups aren't possible here. */
4363 if (block_stack)
4364 --block_stack->data.block.conditional_code;
4367 tree
4368 last_cleanup_this_contour ()
4370 if (block_stack == 0)
4371 return 0;
4373 return block_stack->data.block.cleanups;
4376 /* Return 1 if there are any pending cleanups at this point.
4377 If THIS_CONTOUR is nonzero, check the current contour as well.
4378 Otherwise, look only at the contours that enclose this one. */
4381 any_pending_cleanups (this_contour)
4382 int this_contour;
4384 struct nesting *block;
4386 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4387 return 0;
4389 if (this_contour && block_stack->data.block.cleanups != NULL)
4390 return 1;
4391 if (block_stack->data.block.cleanups == 0
4392 && block_stack->data.block.outer_cleanups == 0)
4393 return 0;
4395 for (block = block_stack->next; block; block = block->next)
4396 if (block->data.block.cleanups != 0)
4397 return 1;
4399 return 0;
4402 /* Enter a case (Pascal) or switch (C) statement.
4403 Push a block onto case_stack and nesting_stack
4404 to accumulate the case-labels that are seen
4405 and to record the labels generated for the statement.
4407 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4408 Otherwise, this construct is transparent for `exit_something'.
4410 EXPR is the index-expression to be dispatched on.
4411 TYPE is its nominal type. We could simply convert EXPR to this type,
4412 but instead we take short cuts. */
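/* Typical call sequence from a front end (editorial sketch; the
   argument names are illustrative):

       expand_start_case (1, index_expr, index_type, "switch statement");
       ... for each label:  pushcase / pushcase_range (...);  ...
       ... expand the statements of the switch body ...
       expand_end_case_type (index_expr, orig_type);
*/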
4414 void
4415 expand_start_case (exit_flag, expr, type, printname)
4416 int exit_flag;
4417 tree expr;
4418 tree type;
4419 const char *printname;
4421 struct nesting *thiscase = ALLOC_NESTING ();
4423 /* Make an entry on case_stack for the case we are entering. */
4425 thiscase->desc = CASE_NESTING;
4426 thiscase->next = case_stack;
4427 thiscase->all = nesting_stack;
4428 thiscase->depth = ++nesting_depth;
4429 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4430 thiscase->data.case_stmt.case_list = 0;
4431 thiscase->data.case_stmt.index_expr = expr;
4432 thiscase->data.case_stmt.nominal_type = type;
4433 thiscase->data.case_stmt.default_label = 0;
4434 thiscase->data.case_stmt.printname = printname;
4435 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4436 case_stack = thiscase;
4437 nesting_stack = thiscase;
4439 do_pending_stack_adjust ();
4440 emit_queue ();
4442 /* Make sure case_stmt.start points to something that won't
4443 need any transformation before expand_end_case. */
4444 if (GET_CODE (get_last_insn ()) != NOTE)
4445 emit_note (NULL, NOTE_INSN_DELETED);
4447 thiscase->data.case_stmt.start = get_last_insn ();
4449 start_cleanup_deferral ();
4452 /* Start a "dummy case statement" within which case labels are invalid
4453 and are not connected to any larger real case statement.
4454 This can be used if you don't want to let a case statement jump
4455 into the middle of certain kinds of constructs. */
4457 void
4458 expand_start_case_dummy ()
4460 struct nesting *thiscase = ALLOC_NESTING ();
4462 /* Make an entry on case_stack for the dummy. */
4464 thiscase->desc = CASE_NESTING;
4465 thiscase->next = case_stack;
4466 thiscase->all = nesting_stack;
4467 thiscase->depth = ++nesting_depth;
4468 thiscase->exit_label = 0;
4469 thiscase->data.case_stmt.case_list = 0;
4470 thiscase->data.case_stmt.start = 0;
4471 thiscase->data.case_stmt.nominal_type = 0;
4472 thiscase->data.case_stmt.default_label = 0;
4473 case_stack = thiscase;
4474 nesting_stack = thiscase;
4475 start_cleanup_deferral ();
4478 static void
4479 check_seenlabel ()
4481 /* If this is the first label, warn if any insns have been emitted. */
4482 if (case_stack->data.case_stmt.line_number_status >= 0)
4484 rtx insn;
4486 restore_line_number_status
4487 (case_stack->data.case_stmt.line_number_status);
4488 case_stack->data.case_stmt.line_number_status = -1;
4490 for (insn = case_stack->data.case_stmt.start;
4491 insn;
4492 insn = NEXT_INSN (insn))
4494 if (GET_CODE (insn) == CODE_LABEL)
4495 break;
4496 if (GET_CODE (insn) != NOTE
4497 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4500 insn = PREV_INSN (insn);
4501 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4503 /* If insn is zero, then there must have been a syntax error. */
4504 if (insn)
4506 location_t locus;
4507 locus.file = NOTE_SOURCE_FILE (insn);
4508 locus.line = NOTE_LINE_NUMBER (insn);
4509 warning ("%Hunreachable code at beginning of %s", &locus,
4510 case_stack->data.case_stmt.printname);
4512 break;
4518 /* Accumulate one case or default label inside a case or switch statement.
4519 VALUE is the value of the case (a null pointer, for a default label).
4520 The function CONVERTER, when applied to arguments T and V,
4521 converts the value V to the type T.
4523 If not currently inside a case or switch statement, return 1 and do
4524 nothing. The caller will print a language-specific error message.
4525 If VALUE is a duplicate or overlaps, return 2 and do nothing
4526 except store the (first) duplicate node in *DUPLICATE.
4527 If VALUE is out of range, return 3 and do nothing.
4528 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4529 Return 0 on success.
4531 Extended to handle range statements. */
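/* Illustrative use (editorial sketch; `converter' stands for whatever
   conversion routine the front end supplies):

       tree duplicate;
       int code = pushcase (case_value, converter, label_decl, &duplicate);
       if (code == 2)
         error_with_decl (duplicate, "duplicate case value");

   The other nonzero codes are diagnosed analogously by the caller.  */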
4534 pushcase (value, converter, label, duplicate)
4535 tree value;
4536 tree (*converter) PARAMS ((tree, tree));
4537 tree label;
4538 tree *duplicate;
4540 tree index_type;
4541 tree nominal_type;
4543 /* Fail if not inside a real case statement. */
4544 if (! (case_stack && case_stack->data.case_stmt.start))
4545 return 1;
4547 if (stack_block_stack
4548 && stack_block_stack->depth > case_stack->depth)
4549 return 5;
4551 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4552 nominal_type = case_stack->data.case_stmt.nominal_type;
4554 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4555 if (index_type == error_mark_node)
4556 return 0;
4558 /* Convert VALUE to the type in which the comparisons are nominally done. */
4559 if (value != 0)
4560 value = (*converter) (nominal_type, value);
4562 check_seenlabel ();
4564 /* Fail if this value is out of range for the actual type of the index
4565 (which may be narrower than NOMINAL_TYPE). */
4566 if (value != 0
4567 && (TREE_CONSTANT_OVERFLOW (value)
4568 || ! int_fits_type_p (value, index_type)))
4569 return 3;
4571 return add_case_node (value, value, label, duplicate);
4574 /* Like pushcase but this case applies to all values between VALUE1 and
4575 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4576 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4577 starts at VALUE1 and ends at the highest value of the index type.
4578 If both are NULL, this case applies to all values.
4580 The return value is the same as that of pushcase but there is one
4581 additional error code: 4 means the specified range was empty. */
4584 pushcase_range (value1, value2, converter, label, duplicate)
4585 tree value1, value2;
4586 tree (*converter) PARAMS ((tree, tree));
4587 tree label;
4588 tree *duplicate;
4590 tree index_type;
4591 tree nominal_type;
4593 /* Fail if not inside a real case statement. */
4594 if (! (case_stack && case_stack->data.case_stmt.start))
4595 return 1;
4597 if (stack_block_stack
4598 && stack_block_stack->depth > case_stack->depth)
4599 return 5;
4601 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4602 nominal_type = case_stack->data.case_stmt.nominal_type;
4604 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4605 if (index_type == error_mark_node)
4606 return 0;
4608 check_seenlabel ();
4610 /* Convert VALUEs to type in which the comparisons are nominally done
4611 and replace any unspecified value with the corresponding bound. */
4612 if (value1 == 0)
4613 value1 = TYPE_MIN_VALUE (index_type);
4614 if (value2 == 0)
4615 value2 = TYPE_MAX_VALUE (index_type);
4617 /* Fail if the range is empty. Do this before any conversion since
4618 we want to allow out-of-range empty ranges. */
4619 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4620 return 4;
4622 /* If the max was unbounded, use the max of the nominal_type we are
4623 converting to. Do this after the < check above to suppress false
4624 positives. */
4625 if (value2 == 0)
4626 value2 = TYPE_MAX_VALUE (nominal_type);
4628 value1 = (*converter) (nominal_type, value1);
4629 value2 = (*converter) (nominal_type, value2);
4631 /* Fail if these values are out of range. */
4632 if (TREE_CONSTANT_OVERFLOW (value1)
4633 || ! int_fits_type_p (value1, index_type))
4634 return 3;
4636 if (TREE_CONSTANT_OVERFLOW (value2)
4637 || ! int_fits_type_p (value2, index_type))
4638 return 3;
4640 return add_case_node (value1, value2, label, duplicate);
4643 /* Do the actual insertion of a case label for pushcase and pushcase_range
4644 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4645 slowdown for large switch statements. */
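/* Editorial note on the balancing code below: each node's BALANCE field
   follows the usual AVL convention, -1 when the left subtree is one level
   deeper, +1 when the right subtree is, and 0 when both have equal height.
   The single and double rotations below restore that invariant after an
   insertion, so lookups in the tree stay O(log N).  */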
4648 add_case_node (low, high, label, duplicate)
4649 tree low, high;
4650 tree label;
4651 tree *duplicate;
4653 struct case_node *p, **q, *r;
4655 /* If there's no HIGH value, then this is not a case range; it's
4656 just a simple case label. But that's just a degenerate case
4657 range. */
4658 if (!high)
4659 high = low;
4661 /* Handle default labels specially. */
4662 if (!high && !low)
4664 if (case_stack->data.case_stmt.default_label != 0)
4666 *duplicate = case_stack->data.case_stmt.default_label;
4667 return 2;
4669 case_stack->data.case_stmt.default_label = label;
4670 expand_label (label);
4671 return 0;
4674 q = &case_stack->data.case_stmt.case_list;
4675 p = *q;
4677 while ((r = *q))
4679 p = r;
4681 /* Keep going past elements distinctly greater than HIGH. */
4682 if (tree_int_cst_lt (high, p->low))
4683 q = &p->left;
4685 /* or distinctly less than LOW. */
4686 else if (tree_int_cst_lt (p->high, low))
4687 q = &p->right;
4689 else
4691 /* We have an overlap; this is an error. */
4692 *duplicate = p->code_label;
4693 return 2;
4697 /* Add this label to the chain, and succeed. */
4699 r = (struct case_node *) ggc_alloc (sizeof (struct case_node));
4700 r->low = low;
4702 /* If the bounds are equal, turn this into the one-value case. */
4703 if (tree_int_cst_equal (low, high))
4704 r->high = r->low;
4705 else
4706 r->high = high;
4708 r->code_label = label;
4709 expand_label (label);
4711 *q = r;
4712 r->parent = p;
4713 r->left = 0;
4714 r->right = 0;
4715 r->balance = 0;
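/* Walk back toward the root, updating balance factors and rotating
   wherever the insertion has made one subtree two levels deeper.  */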
4717 while (p)
4719 struct case_node *s;
4721 if (r == p->left)
4723 int b;
4725 if (! (b = p->balance))
4726 /* Growth propagation from left side. */
4727 p->balance = -1;
4728 else if (b < 0)
4730 if (r->balance < 0)
4732 /* R-Rotation */
4733 if ((p->left = s = r->right))
4734 s->parent = p;
4736 r->right = p;
4737 p->balance = 0;
4738 r->balance = 0;
4739 s = p->parent;
4740 p->parent = r;
4742 if ((r->parent = s))
4744 if (s->left == p)
4745 s->left = r;
4746 else
4747 s->right = r;
4749 else
4750 case_stack->data.case_stmt.case_list = r;
4752 else
4753 /* r->balance == +1 */
4755 /* LR-Rotation */
4757 int b2;
4758 struct case_node *t = r->right;
4760 if ((p->left = s = t->right))
4761 s->parent = p;
4763 t->right = p;
4764 if ((r->right = s = t->left))
4765 s->parent = r;
4767 t->left = r;
4768 b = t->balance;
4769 b2 = b < 0;
4770 p->balance = b2;
4771 b2 = -b2 - b;
4772 r->balance = b2;
4773 t->balance = 0;
4774 s = p->parent;
4775 p->parent = t;
4776 r->parent = t;
4778 if ((t->parent = s))
4780 if (s->left == p)
4781 s->left = t;
4782 else
4783 s->right = t;
4785 else
4786 case_stack->data.case_stmt.case_list = t;
4788 break;
4791 else
4793 /* p->balance == +1; growth of left side balances the node. */
4794 p->balance = 0;
4795 break;
4798 else
4799 /* r == p->right */
4801 int b;
4803 if (! (b = p->balance))
4804 /* Growth propagation from right side. */
4805 p->balance++;
4806 else if (b > 0)
4808 if (r->balance > 0)
4810 /* L-Rotation */
4812 if ((p->right = s = r->left))
4813 s->parent = p;
4815 r->left = p;
4816 p->balance = 0;
4817 r->balance = 0;
4818 s = p->parent;
4819 p->parent = r;
4820 if ((r->parent = s))
4822 if (s->left == p)
4823 s->left = r;
4824 else
4825 s->right = r;
4828 else
4829 case_stack->data.case_stmt.case_list = r;
4832 else
4833 /* r->balance == -1 */
4835 /* RL-Rotation */
4836 int b2;
4837 struct case_node *t = r->left;
4839 if ((p->right = s = t->left))
4840 s->parent = p;
4842 t->left = p;
4844 if ((r->left = s = t->right))
4845 s->parent = r;
4847 t->right = r;
4848 b = t->balance;
4849 b2 = b < 0;
4850 r->balance = b2;
4851 b2 = -b2 - b;
4852 p->balance = b2;
4853 t->balance = 0;
4854 s = p->parent;
4855 p->parent = t;
4856 r->parent = t;
4858 if ((t->parent = s))
4860 if (s->left == p)
4861 s->left = t;
4862 else
4863 s->right = t;
4866 else
4867 case_stack->data.case_stmt.case_list = t;
4869 break;
4871 else
4873 /* p->balance == -1; growth of right side balances the node. */
4874 p->balance = 0;
4875 break;
4879 r = p;
4880 p = p->parent;
4883 return 0;
4886 /* Returns the number of possible values of TYPE.
4887 Returns -1 if the number is unknown, variable, or if the number does not
4888 fit in a HOST_WIDE_INT.
4889 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4890 do not increase monotonically (there may be duplicates);
4891 to 1 if the values increase monotonically, but not always by 1;
4892 otherwise sets it to 0. */
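/* For example (editorial illustration): a BOOLEAN_TYPE yields 2, a
   CHAR_TYPE yields 1 << BITS_PER_UNIT, an INTEGER_TYPE yields
   max - min + 1 when that fits in a HOST_WIDE_INT, and an
   ENUMERAL_TYPE such as  enum e { A, B, C = 5 }  yields the number
   of enumerators, here 3.  */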
4894 HOST_WIDE_INT
4895 all_cases_count (type, sparseness)
4896 tree type;
4897 int *sparseness;
4899 tree t;
4900 HOST_WIDE_INT count, minval, lastval;
4902 *sparseness = 0;
4904 switch (TREE_CODE (type))
4906 case BOOLEAN_TYPE:
4907 count = 2;
4908 break;
4910 case CHAR_TYPE:
4911 count = 1 << BITS_PER_UNIT;
4912 break;
4914 default:
4915 case INTEGER_TYPE:
4916 if (TYPE_MAX_VALUE (type) != 0
4917 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4918 TYPE_MIN_VALUE (type))))
4919 && 0 != (t = fold (build (PLUS_EXPR, type, t,
4920 convert (type, integer_zero_node))))
4921 && host_integerp (t, 1))
4922 count = tree_low_cst (t, 1);
4923 else
4924 return -1;
4925 break;
4927 case ENUMERAL_TYPE:
4928 /* Don't waste time with enumeral types with huge values. */
4929 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4930 || TYPE_MAX_VALUE (type) == 0
4931 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4932 return -1;
4934 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
4935 count = 0;
4937 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4939 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4941 if (*sparseness == 2 || thisval <= lastval)
4942 *sparseness = 2;
4943 else if (thisval != minval + count)
4944 *sparseness = 1;
4946 lastval = thisval;
4947 count++;
4951 return count;
4954 #define BITARRAY_TEST(ARRAY, INDEX) \
4955 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4956 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4957 #define BITARRAY_SET(ARRAY, INDEX) \
4958 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4959 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
4961 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4962 with the case values we have seen, assuming the case expression
4963 has the given TYPE.
4964 SPARSENESS is as determined by all_cases_count.
4966 The time needed is proportional to COUNT, unless
4967 SPARSENESS is 2, in which case quadratic time is needed. */
4969 void
4970 mark_seen_cases (type, cases_seen, count, sparseness)
4971 tree type;
4972 unsigned char *cases_seen;
4973 HOST_WIDE_INT count;
4974 int sparseness;
4976 tree next_node_to_try = NULL_TREE;
4977 HOST_WIDE_INT next_node_offset = 0;
4979 struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4980 tree val = make_node (INTEGER_CST);
4982 TREE_TYPE (val) = type;
4983 if (! root)
4984 /* Do nothing. */
4986 else if (sparseness == 2)
4988 tree t;
4989 unsigned HOST_WIDE_INT xlo;
4991 /* This less efficient loop is only needed to handle
4992 duplicate case values (multiple enum constants
4993 with the same value). */
4994 TREE_TYPE (val) = TREE_TYPE (root->low);
4995 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4996 t = TREE_CHAIN (t), xlo++)
4998 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4999 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
5000 n = root;
5003 /* Keep going past elements distinctly greater than VAL. */
5004 if (tree_int_cst_lt (val, n->low))
5005 n = n->left;
5007 /* or distinctly less than VAL. */
5008 else if (tree_int_cst_lt (n->high, val))
5009 n = n->right;
5011 else
5013 /* We have found a matching range. */
5014 BITARRAY_SET (cases_seen, xlo);
5015 break;
5018 while (n);
5021 else
5023 if (root->left)
5024 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5026 for (n = root; n; n = n->right)
5028 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5029 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5030 while (! tree_int_cst_lt (n->high, val))
5032 /* Calculate (into xlo) the "offset" of the integer (val).
5033 The element with lowest value has offset 0, the next smallest
5034 element has offset 1, etc. */
5036 unsigned HOST_WIDE_INT xlo;
5037 HOST_WIDE_INT xhi;
5038 tree t;
5040 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5042 /* The TYPE_VALUES will be in increasing order, so
5043 starting searching where we last ended. */
5044 t = next_node_to_try;
5045 xlo = next_node_offset;
5046 xhi = 0;
5047 for (;;)
5049 if (t == NULL_TREE)
5051 t = TYPE_VALUES (type);
5052 xlo = 0;
5054 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5056 next_node_to_try = TREE_CHAIN (t);
5057 next_node_offset = xlo + 1;
5058 break;
5060 xlo++;
5061 t = TREE_CHAIN (t);
5062 if (t == next_node_to_try)
5064 xlo = -1;
5065 break;
5069 else
5071 t = TYPE_MIN_VALUE (type);
5072 if (t)
5073 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5074 &xlo, &xhi);
5075 else
5076 xlo = xhi = 0;
5077 add_double (xlo, xhi,
5078 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5079 &xlo, &xhi);
5082 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
5083 BITARRAY_SET (cases_seen, xlo);
5085 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5086 1, 0,
5087 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5093 /* Given a switch statement with an expression that is an enumeration
5094 type, warn if any of the enumeration type's literals are not
5095 covered by the case expressions of the switch. Also, warn if there
5096 are any extra switch cases that are *not* elements of the
5097 enumerated type.
5099 Historical note:
5101 At one stage this function would: ``If all enumeration literals
5102 were covered by the case expressions, turn one of the expressions
5103 into the default expression since it should not be possible to fall
5104 through such a switch.''
5106 That code has since been removed as: ``This optimization is
5107 disabled because it causes valid programs to fail. ANSI C does not
5108 guarantee that an expression with enum type will have a value that
5109 is the same as one of the enumeration literals.'' */
5111 void
5112 check_for_full_enumeration_handling (type)
5113 tree type;
5115 struct case_node *n;
5116 tree chain;
5118 /* True iff the selector type is a numbered set mode. */
5119 int sparseness = 0;
5121 /* The number of possible selector values. */
5122 HOST_WIDE_INT size;
5124 /* For each possible selector value, a one iff it has been matched
5125 by a case value alternative. */
5126 unsigned char *cases_seen;
5128 /* The allocated size of cases_seen, in chars. */
5129 HOST_WIDE_INT bytes_needed;
5131 size = all_cases_count (type, &sparseness);
5132 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5134 if (size > 0 && size < 600000
5135 /* We deliberately use calloc here, not cmalloc, so that we can suppress
5136 this optimization if we don't have enough memory rather than
5137 aborting, as xmalloc would do. */
5138 && (cases_seen =
5139 (unsigned char *) really_call_calloc (bytes_needed, 1)) != NULL)
5141 HOST_WIDE_INT i;
5142 tree v = TYPE_VALUES (type);
5144 /* The time complexity of this code is normally O(N), where
5145 N is the number of members in the enumerated type.
5146 However, if type is an ENUMERAL_TYPE whose values do not
5147 increase monotonically, O(N*log(N)) time may be needed. */
5149 mark_seen_cases (type, cases_seen, size, sparseness);
5151 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5152 if (BITARRAY_TEST (cases_seen, i) == 0)
5153 warning ("enumeration value `%s' not handled in switch",
5154 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5156 free (cases_seen);
5159 /* Now we go the other way around; we warn if there are case
5160 expressions that don't correspond to enumerators. This can
5161 occur since C and C++ don't enforce type-checking of
5162 assignments to enumeration variables. */
5164 if (case_stack->data.case_stmt.case_list
5165 && case_stack->data.case_stmt.case_list->left)
5166 case_stack->data.case_stmt.case_list
5167 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5168 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5170 for (chain = TYPE_VALUES (type);
5171 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5172 chain = TREE_CHAIN (chain))
5175 if (!chain)
5177 if (TYPE_NAME (type) == 0)
5178 warning ("case value `%ld' not in enumerated type",
5179 (long) TREE_INT_CST_LOW (n->low));
5180 else
5181 warning ("case value `%ld' not in enumerated type `%s'",
5182 (long) TREE_INT_CST_LOW (n->low),
5183 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5184 == IDENTIFIER_NODE)
5185 ? TYPE_NAME (type)
5186 : DECL_NAME (TYPE_NAME (type))));
5188 if (!tree_int_cst_equal (n->low, n->high))
5190 for (chain = TYPE_VALUES (type);
5191 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5192 chain = TREE_CHAIN (chain))
5195 if (!chain)
5197 if (TYPE_NAME (type) == 0)
5198 warning ("case value `%ld' not in enumerated type",
5199 (long) TREE_INT_CST_LOW (n->high));
5200 else
5201 warning ("case value `%ld' not in enumerated type `%s'",
5202 (long) TREE_INT_CST_LOW (n->high),
5203 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5204 == IDENTIFIER_NODE)
5205 ? TYPE_NAME (type)
5206 : DECL_NAME (TYPE_NAME (type))));
5213 /* Maximum number of case bit tests. */
5214 #define MAX_CASE_BIT_TESTS 3
5216 /* By default, enable case bit tests on targets with ashlsi3. */
5217 #ifndef CASE_USE_BIT_TESTS
5218 #define CASE_USE_BIT_TESTS (ashl_optab->handlers[word_mode].insn_code \
5219 != CODE_FOR_nothing)
5220 #endif
5223 /* A case_bit_test represents a set of case nodes that may be
5224 selected from using a bit-wise comparison. HI and LO hold
5225 the integer to be tested against, LABEL contains the label
5226 to jump to upon success and BITS counts the number of case
5227 nodes handled by this test, typically the number of bits
5228 set in HI:LO. */
5230 struct case_bit_test
5232 HOST_WIDE_INT hi;
5233 HOST_WIDE_INT lo;
5234 rtx label;
5235 int bits;
5238 /* Determine whether "1 << x" is relatively cheap in word_mode. */
5240 static bool lshift_cheap_p ()
5242 static bool init = false;
5243 static bool cheap = true;
5245 if (!init)
5247 rtx reg = gen_rtx_REG (word_mode, 10000);
5248 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
5249 cheap = cost < COSTS_N_INSNS (3);
5250 init = true;
5253 return cheap;
5256 /* Comparison function for qsort to order bit tests by decreasing
5257 number of case nodes, i.e. the node with the most cases gets
5258 tested first. */
5260 static int case_bit_test_cmp (p1, p2)
5261 const void *p1;
5262 const void *p2;
5264 const struct case_bit_test *d1 = p1;
5265 const struct case_bit_test *d2 = p2;
5267 return d2->bits - d1->bits;
5270 /* Expand a switch statement by a short sequence of bit-wise
5271 comparisons. "switch(x)" is effectively converted into
5272 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
5273 integer constants.
5275 INDEX_EXPR is the value being switched on, which is of
5276 type INDEX_TYPE. MINVAL is the lowest case value in
5277 the case nodes, of INDEX_TYPE type, and RANGE is the highest
5278 value minus MINVAL, also of type INDEX_TYPE. NODES is
5279 the set of case nodes, and DEFAULT_LABEL is the label to
5280 branch to should none of the cases match.
5282 There *MUST* be MAX_CASE_BIT_TESTS or fewer unique case
5283 node targets. */
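/* Worked example (editorial sketch):

       switch (x) { case 1: case 4: case 6: foo (); break;
                    case 2: case 5:         bar (); break; }

   has MINVAL 1 and RANGE 5, and the code below emits roughly

       t = x - 1;
       if (t > 5) goto default_label;           (unsigned compare)
       t = 1 << t;
       if (t & 0x29) goto foo_label;            (bits for 1, 4, 6)
       if (t & 0x12) goto bar_label;            (bits for 2, 5)
       goto default_label;

   with the most populous target tested first.  */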
5285 static void
5286 emit_case_bit_tests (index_type, index_expr, minval, range,
5287 nodes, default_label)
5288 tree index_type, index_expr, minval, range;
5289 case_node_ptr nodes;
5290 rtx default_label;
5292 struct case_bit_test test[MAX_CASE_BIT_TESTS];
5293 enum machine_mode mode;
5294 rtx expr, index, label;
5295 unsigned int i, j, lo, hi;
5296 struct case_node *n;
5297 unsigned int count;
5299 count = 0;
5300 for (n = nodes; n; n = n->right)
5302 label = label_rtx (n->code_label);
5303 for (i = 0; i < count; i++)
5304 if (same_case_target_p (label, test[i].label))
5305 break;
5307 if (i == count)
5309 if (count >= MAX_CASE_BIT_TESTS)
5310 abort ();
5311 test[i].hi = 0;
5312 test[i].lo = 0;
5313 test[i].label = label;
5314 test[i].bits = 1;
5315 count++;
5317 else
5318 test[i].bits++;
5320 lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5321 n->low, minval)), 1);
5322 hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5323 n->high, minval)), 1);
5324 for (j = lo; j <= hi; j++)
5325 if (j >= HOST_BITS_PER_WIDE_INT)
5326 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_WIDE_INT);
5327 else
5328 test[i].lo |= (HOST_WIDE_INT) 1 << j;
5331 qsort (test, count, sizeof (*test), case_bit_test_cmp);
5333 index_expr = fold (build (MINUS_EXPR, index_type,
5334 convert (index_type, index_expr),
5335 convert (index_type, minval)));
5336 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5337 emit_queue ();
5338 index = protect_from_queue (index, 0);
5339 do_pending_stack_adjust ();
5341 mode = TYPE_MODE (index_type);
5342 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
5343 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
5344 default_label);
5346 index = convert_to_mode (word_mode, index, 0);
5347 index = expand_binop (word_mode, ashl_optab, const1_rtx,
5348 index, NULL_RTX, 1, OPTAB_WIDEN);
5350 for (i = 0; i < count; i++)
5352 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
5353 expr = expand_binop (word_mode, and_optab, index, expr,
5354 NULL_RTX, 1, OPTAB_WIDEN);
5355 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
5356 word_mode, 1, test[i].label);
5359 emit_jump (default_label);
5362 /* Terminate a case (Pascal) or switch (C) statement
5363 in which ORIG_INDEX is the expression to be tested.
5364 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
5365 type as given in the source before any compiler conversions.
5366 Generate the code to test it and jump to the right place. */
5368 void
5369 expand_end_case_type (orig_index, orig_type)
5370 tree orig_index, orig_type;
5372 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
5373 rtx default_label = 0;
5374 struct case_node *n, *m;
5375 unsigned int count, uniq;
5376 rtx index;
5377 rtx table_label;
5378 int ncases;
5379 rtx *labelvec;
5380 int i;
5381 rtx before_case, end, lab;
5382 struct nesting *thiscase = case_stack;
5383 tree index_expr, index_type;
5384 bool exit_done = false;
5385 int unsignedp;
5387 /* Don't crash due to previous errors. */
5388 if (thiscase == NULL)
5389 return;
5391 index_expr = thiscase->data.case_stmt.index_expr;
5392 index_type = TREE_TYPE (index_expr);
5393 unsignedp = TREE_UNSIGNED (index_type);
5394 if (orig_type == NULL)
5395 orig_type = TREE_TYPE (orig_index);
5397 do_pending_stack_adjust ();
5399 /* This might get a spurious warning in the presence of a syntax error;
5400 it could be fixed by moving the call to check_seenlabel after the
5401 check for error_mark_node, and copying the code of check_seenlabel that
5402 deals with case_stack->data.case_stmt.line_number_status /
5403 restore_line_number_status in front of the call to end_cleanup_deferral.
5404 However, this might miss some useful warnings in the presence of
5405 non-syntax errors. */
5406 check_seenlabel ();
5408 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5409 if (index_type != error_mark_node)
5411 /* If the switch expression was an enumerated type, check that
5412 exactly all enumeration literals are covered by the cases.
5413 The check is made when -Wswitch was specified and there is no
5414 default case, or when -Wswitch-enum was specified. */
5415 if (((warn_switch && !thiscase->data.case_stmt.default_label)
5416 || warn_switch_enum)
5417 && TREE_CODE (orig_type) == ENUMERAL_TYPE
5418 && TREE_CODE (index_expr) != INTEGER_CST)
5419 check_for_full_enumeration_handling (orig_type);
5421 if (warn_switch_default && !thiscase->data.case_stmt.default_label)
5422 warning ("switch missing default case");
5424 /* If we don't have a default-label, create one here,
5425 after the body of the switch. */
5426 if (thiscase->data.case_stmt.default_label == 0)
5428 thiscase->data.case_stmt.default_label
5429 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5430 /* Share the exit label if possible. */
5431 if (thiscase->exit_label)
5433 SET_DECL_RTL (thiscase->data.case_stmt.default_label,
5434 thiscase->exit_label);
5435 exit_done = true;
5437 expand_label (thiscase->data.case_stmt.default_label);
5439 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5441 before_case = get_last_insn ();
5443 if (thiscase->data.case_stmt.case_list
5444 && thiscase->data.case_stmt.case_list->left)
5445 thiscase->data.case_stmt.case_list
5446 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5448 /* Simplify the case-list before we count it. */
5449 group_case_nodes (thiscase->data.case_stmt.case_list);
5450 strip_default_case_nodes (&thiscase->data.case_stmt.case_list,
5451 default_label);
5453 /* Get upper and lower bounds of case values.
5454 Also convert all the case values to the index expr's data type. */
5456 uniq = 0;
5457 count = 0;
5458 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5460 /* Check low and high label values are integers. */
5461 if (TREE_CODE (n->low) != INTEGER_CST)
5462 abort ();
5463 if (TREE_CODE (n->high) != INTEGER_CST)
5464 abort ();
5466 n->low = convert (index_type, n->low);
5467 n->high = convert (index_type, n->high);
5469 /* Count the elements and track the largest and smallest
5470 of them (treating them as signed even if they are not). */
5471 if (count++ == 0)
5473 minval = n->low;
5474 maxval = n->high;
5476 else
5478 if (INT_CST_LT (n->low, minval))
5479 minval = n->low;
5480 if (INT_CST_LT (maxval, n->high))
5481 maxval = n->high;
5483 /* A range counts double, since it requires two compares. */
5484 if (! tree_int_cst_equal (n->low, n->high))
5485 count++;
5487 /* Count the number of unique case node targets. */
5488 uniq++;
5489 lab = label_rtx (n->code_label);
5490 for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
5491 if (same_case_target_p (label_rtx (m->code_label), lab))
5493 uniq--;
5494 break;
5498 /* Compute span of values. */
5499 if (count != 0)
5500 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5502 end_cleanup_deferral ();
5504 if (count == 0)
5506 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5507 emit_queue ();
5508 emit_jump (default_label);
5511 /* Try implementing this switch statement by a short sequence of
5512 bit-wise comparisons. However, we let the binary-tree case
5513 below handle constant index expressions. */
5514 else if (CASE_USE_BIT_TESTS
5515 && ! TREE_CONSTANT (index_expr)
5516 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
5517 && compare_tree_int (range, 0) > 0
5518 && lshift_cheap_p ()
5519 && ((uniq == 1 && count >= 3)
5520 || (uniq == 2 && count >= 5)
5521 || (uniq == 3 && count >= 6)))
5523 /* Optimize the case where all the case values fit in a
5524 word without having to subtract MINVAL. In this case,
5525 we can optimize away the subtraction. */
5526 if (compare_tree_int (minval, 0) > 0
5527 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
5529 minval = integer_zero_node;
5530 range = maxval;
5532 emit_case_bit_tests (index_type, index_expr, minval, range,
5533 thiscase->data.case_stmt.case_list,
5534 default_label);
5537 /* If range of values is much bigger than number of values,
5538 make a sequence of conditional branches instead of a dispatch.
5539 If the switch-index is a constant, do it this way
5540 because we can optimize it. */
5542 else if (count < case_values_threshold ()
5543 || compare_tree_int (range, 10 * count) > 0
5544 /* RANGE may be signed, and really large ranges will show up
5545 as negative numbers. */
5546 || compare_tree_int (range, 0) < 0
5547 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5548 || flag_pic
5549 #endif
5550 || TREE_CONSTANT (index_expr))
5552 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5554 /* If the index is a short or char that we do not have
5555 an insn to handle comparisons directly, convert it to
5556 a full integer now, rather than letting each comparison
5557 generate the conversion. */
5559 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5560 && ! have_insn_for (COMPARE, GET_MODE (index)))
5562 enum machine_mode wider_mode;
5563 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5564 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5565 if (have_insn_for (COMPARE, wider_mode))
5567 index = convert_to_mode (wider_mode, index, unsignedp);
5568 break;
5572 emit_queue ();
5573 do_pending_stack_adjust ();
5575 index = protect_from_queue (index, 0);
5576 if (GET_CODE (index) == MEM)
5577 index = copy_to_reg (index);
5578 if (GET_CODE (index) == CONST_INT
5579 || TREE_CODE (index_expr) == INTEGER_CST)
5581 /* Make a tree node with the proper constant value
5582 if we don't already have one. */
5583 if (TREE_CODE (index_expr) != INTEGER_CST)
5585 index_expr
5586 = build_int_2 (INTVAL (index),
5587 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5588 index_expr = convert (index_type, index_expr);
5591 /* For constant index expressions we need only
5592 issue an unconditional branch to the appropriate
5593 target code. The job of removing any unreachable
5594 code is left to the optimisation phase if the
5595 "-O" option is specified. */
5596 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5597 if (! tree_int_cst_lt (index_expr, n->low)
5598 && ! tree_int_cst_lt (n->high, index_expr))
5599 break;
5601 if (n)
5602 emit_jump (label_rtx (n->code_label));
5603 else
5604 emit_jump (default_label);
5606 else
5608 /* If the index expression is not constant we generate
5609 a binary decision tree to select the appropriate
5610 target code. This is done as follows:
5612 The list of cases is rearranged into a binary tree,
5613 nearly optimal assuming equal probability for each case.
5615 The tree is transformed into RTL, eliminating
5616 redundant test conditions at the same time.
5618 If program flow could reach the end of the
5619 decision tree an unconditional jump to the
5620 default code is emitted. */
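   /* For illustration (hypothetical case values, assuming equal
      weights): the ordered list 1 -> 4 -> 9 -> 16 -> 25 is balanced
      with 9 as the pivot, and the emitted decision tree is
      approximately

	 if (index == 9)  goto L9;
	 if (index > 9)   goto higher;
	 if (index == 1)  goto L1;
	 if (index == 4)  goto L4;
	 goto default_label;
      higher:
	 if (index == 16) goto L16;
	 if (index == 25) goto L25;
	 goto default_label;

      where L1..L25 stand for the labels of the corresponding cases.  */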
5622 use_cost_table
5623 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
5624 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5625 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
5626 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5627 default_label, index_type);
5628 emit_jump_if_reachable (default_label);
5631 else
5633 table_label = gen_label_rtx ();
5634 if (! try_casesi (index_type, index_expr, minval, range,
5635 table_label, default_label))
5637 index_type = thiscase->data.case_stmt.nominal_type;
5639 /* Index jumptables from zero for suitable values of
5640 minval to avoid a subtraction. */
5641 if (! optimize_size
5642 && compare_tree_int (minval, 0) > 0
5643 && compare_tree_int (minval, 3) < 0)
5645 minval = integer_zero_node;
5646 range = maxval;
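   /* For example (hypothetically), a switch whose smallest case value
      is 1 and largest is 5 would normally subtract 1 from the index
      and use a 5-entry table; with the adjustment above it uses a
      6-entry table whose slot 0 simply holds the default label, so
      the runtime subtraction is avoided.  */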
5649 if (! try_tablejump (index_type, index_expr, minval, range,
5650 table_label, default_label))
5651 abort ();
5654 /* Get table of labels to jump to, in order of case index. */
5656 ncases = tree_low_cst (range, 0) + 1;
5657 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5658 memset ((char *) labelvec, 0, ncases * sizeof (rtx));
5660 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5662 /* Compute the low and high bounds relative to the minimum
5663 value since that should fit in a HOST_WIDE_INT while the
5664 actual values may not. */
5665 HOST_WIDE_INT i_low
5666 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5667 n->low, minval)), 1);
5668 HOST_WIDE_INT i_high
5669 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5670 n->high, minval)), 1);
5671 HOST_WIDE_INT i;
5673 for (i = i_low; i <= i_high; i ++)
5674 labelvec[i]
5675 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5678 /* Fill in the gaps with the default. */
5679 for (i = 0; i < ncases; i++)
5680 if (labelvec[i] == 0)
5681 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
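   /* As a hypothetical illustration: case values 5, 6 and 10 with
      minval == 5 give ncases == 6, and the vector built above is

	 labelvec = { L5, L6, DEFAULT, DEFAULT, DEFAULT, L10 }

      where L5, L6 and L10 stand for the labels of the corresponding
      cases and DEFAULT for the default label.  */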
5683 /* Output the table */
5684 emit_label (table_label);
5686 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5687 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5688 gen_rtx_LABEL_REF (Pmode, table_label),
5689 gen_rtvec_v (ncases, labelvec),
5690 const0_rtx, const0_rtx));
5691 else
5692 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5693 gen_rtvec_v (ncases, labelvec)));
5695 /* If the case insn drops through the table,
5696 after the table we must jump to the default-label.
5697 Otherwise record no drop-through after the table. */
5698 #ifdef CASE_DROPS_THROUGH
5699 emit_jump (default_label);
5700 #else
5701 emit_barrier ();
5702 #endif
5705 before_case = NEXT_INSN (before_case);
5706 end = get_last_insn ();
5707 if (squeeze_notes (&before_case, &end))
5708 abort ();
5709 reorder_insns (before_case, end,
5710 thiscase->data.case_stmt.start);
5712 else
5713 end_cleanup_deferral ();
5715 if (thiscase->exit_label && !exit_done)
5716 emit_label (thiscase->exit_label);
5718 POPSTACK (case_stack);
5720 free_temp_slots ();
5723 /* Convert the tree NODE into a list linked by the right field, with the left
5724 field zeroed. RIGHT is used for recursion; it is a list to be placed
5725 rightmost in the resulting list. */
5727 static struct case_node *
5728 case_tree2list (node, right)
5729 struct case_node *node, *right;
5731 struct case_node *left;
5733 if (node->right)
5734 right = case_tree2list (node->right, right);
5736 node->right = right;
5737 if ((left = node->left))
5739 node->left = 0;
5740 return case_tree2list (left, node);
5743 return node;
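/* As a hypothetical illustration of the transformation above, the tree

	  4
	 / \
	2   5
       / \
      1   3

   becomes the list 1 -> 2 -> 3 -> 4 -> 5, linked through the RIGHT
   fields and with every LEFT field cleared.  */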
5746 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5748 static void
5749 do_jump_if_equal (op1, op2, label, unsignedp)
5750 rtx op1, op2, label;
5751 int unsignedp;
5753 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
5755 if (INTVAL (op1) == INTVAL (op2))
5756 emit_jump (label);
5758 else
5759 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
5760 (GET_MODE (op1) == VOIDmode
5761 ? GET_MODE (op2) : GET_MODE (op1)),
5762 unsignedp, label);
5765 /* Not all case values are encountered equally. This function
5766 uses a heuristic to weight case labels, in cases where that
5767 looks like a reasonable thing to do.
5769 Right now, all we try to guess is text, and we establish the
5770 following weights:
5772 chars above space: 16
5773 digits: 16
5774 default: 12
5775 space, punct: 8
5776 tab: 4
5777 newline: 2
5778 other "\" chars: 1
5779 remaining chars: 0
5781 If we find any cases in the switch that are not either -1 or in the range
5782 of valid ASCII characters, or are control characters other than those
5783 commonly used with "\", don't treat this switch as scanning text.
5785 Return 1 if these nodes are suitable for cost estimation, otherwise
5786 return 0. */
5788 static int
5789 estimate_case_costs (node)
5790 case_node_ptr node;
5792 tree min_ascii = integer_minus_one_node;
5793 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5794 case_node_ptr n;
5795 int i;
5797 /* If we haven't already made the cost table, make it now. Note that the
5798 lower bound of the table is -1, not zero. */
5800 if (! cost_table_initialized)
5802 cost_table_initialized = 1;
5804 for (i = 0; i < 128; i++)
5806 if (ISALNUM (i))
5807 COST_TABLE (i) = 16;
5808 else if (ISPUNCT (i))
5809 COST_TABLE (i) = 8;
5810 else if (ISCNTRL (i))
5811 COST_TABLE (i) = -1;
5814 COST_TABLE (' ') = 8;
5815 COST_TABLE ('\t') = 4;
5816 COST_TABLE ('\0') = 4;
5817 COST_TABLE ('\n') = 2;
5818 COST_TABLE ('\f') = 1;
5819 COST_TABLE ('\v') = 1;
5820 COST_TABLE ('\b') = 1;
5823 /* See if all the case expressions look like text. It is text if the
5824 lowest constant is >= -1 and the highest constant is <= 127. Do all comparisons
5825 as signed arithmetic since we don't want to ever access cost_table with a
5826 value less than -1. Also check that none of the constants in a range
5827 are strange control characters. */
5829 for (n = node; n; n = n->right)
5831 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5832 return 0;
5834 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5835 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5836 if (COST_TABLE (i) < 0)
5837 return 0;
5840 /* All interesting values are within the range of interesting
5841 ASCII characters. */
5842 return 1;
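/* As a worked, hypothetical example of how the table is used: for case
   values '\n', ' ', '0' and 'a' the weights are 2, 8, 16 and 16, a total
   of 42.  balance_case_nodes then looks for the node at which roughly
   half that cost (21) has been consumed, so the pivot lands on '0'
   rather than on ' ' (the middle element by count), and the frequent
   alphanumeric labels are reached with fewer comparisons.  */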
5845 /* Determine whether two case labels branch to the same target. */
5847 static bool
5848 same_case_target_p (l1, l2)
5849 rtx l1, l2;
5851 rtx i1, i2;
5853 if (l1 == l2)
5854 return true;
5856 i1 = next_real_insn (l1);
5857 i2 = next_real_insn (l2);
5858 if (i1 == i2)
5859 return true;
5861 if (i1 && simplejump_p (i1))
5863 l1 = XEXP (SET_SRC (PATTERN (i1)), 0);
5866 if (i2 && simplejump_p (i2))
5868 l2 = XEXP (SET_SRC (PATTERN (i2)), 0);
5870 return l1 == l2;
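/* For illustration (hypothetical): in

      case 1:
      case 2:
	do_something ();
	break;

   the rtx labels for 1 and 2 are followed by the same first real insn,
   and in

      case 3:
	goto common;

   the label for 3 is followed only by a simple jump, which is traced to
   its target; either way the two labels compare as branching to the
   same place.  */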
5873 /* Delete nodes that branch to the default label from a list of
5874 case nodes. Eg. case 5: default: becomes just default: */
5876 static void
5877 strip_default_case_nodes (prev, deflab)
5878 case_node_ptr *prev;
5879 rtx deflab;
5881 case_node_ptr ptr;
5883 while (*prev)
5885 ptr = *prev;
5886 if (same_case_target_p (label_rtx (ptr->code_label), deflab))
5887 *prev = ptr->right;
5888 else
5889 prev = &ptr->right;
5893 /* Scan an ordered list of case nodes
5894 combining those with consecutive values or ranges.
5896 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
5898 static void
5899 group_case_nodes (head)
5900 case_node_ptr head;
5902 case_node_ptr node = head;
5904 while (node)
5906 rtx lab = label_rtx (node->code_label);
5907 case_node_ptr np = node;
5909 /* Try to group the successors of NODE with NODE. */
5910 while (((np = np->right) != 0)
5911 /* Do they jump to the same place? */
5912 && same_case_target_p (label_rtx (np->code_label), lab)
5913 /* Are their ranges consecutive? */
5914 && tree_int_cst_equal (np->low,
5915 fold (build (PLUS_EXPR,
5916 TREE_TYPE (node->high),
5917 node->high,
5918 integer_one_node)))
5919 /* An overflow is not consecutive. */
5920 && tree_int_cst_lt (node->high,
5921 fold (build (PLUS_EXPR,
5922 TREE_TYPE (node->high),
5923 node->high,
5924 integer_one_node))))
5926 node->high = np->high;
5928 /* NP is the first node after NODE which can't be grouped with it.
5929 Delete the nodes in between, and move on to that node. */
5930 node->right = np;
5931 node = np;
5935 /* Take an ordered list of case nodes
5936 and transform them into a near optimal binary tree,
5937 on the assumption that any target code selection value is as
5938 likely as any other.
5940 The transformation is performed by splitting the ordered
5941 list into two equal sections plus a pivot. The parts are
5942 then attached to the pivot as left and right branches. Each
5943 branch is then transformed recursively. */
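/* As a hypothetical illustration (equal weights, no ranges): the list
   1 -> 2 -> 3 -> 4 -> 5 is split at 3, giving

	     3
	    / \
	   1   4
	    \   \
	     2   5

   and each half would itself be split recursively if it held more than
   two nodes.  */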
5945 static void
5946 balance_case_nodes (head, parent)
5947 case_node_ptr *head;
5948 case_node_ptr parent;
5950 case_node_ptr np;
5952 np = *head;
5953 if (np)
5955 int cost = 0;
5956 int i = 0;
5957 int ranges = 0;
5958 case_node_ptr *npp;
5959 case_node_ptr left;
5961 /* Count the number of entries on branch. Also count the ranges. */
5963 while (np)
5965 if (!tree_int_cst_equal (np->low, np->high))
5967 ranges++;
5968 if (use_cost_table)
5969 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
5972 if (use_cost_table)
5973 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
5975 i++;
5976 np = np->right;
5979 if (i > 2)
5981 /* Split this list if it is long enough for that to help. */
5982 npp = head;
5983 left = *npp;
5984 if (use_cost_table)
5986 /* Find the place in the list that bisects the list's total cost;
5987 here I gets half the total cost. */
5988 int n_moved = 0;
5989 i = (cost + 1) / 2;
5990 while (1)
5992 /* Skip nodes while their cost does not reach that amount. */
5993 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5994 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5995 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
5996 if (i <= 0)
5997 break;
5998 npp = &(*npp)->right;
5999 n_moved += 1;
6001 if (n_moved == 0)
6003 /* Leave this branch lopsided, but optimize left-hand
6004 side and fill in `parent' fields for right-hand side. */
6005 np = *head;
6006 np->parent = parent;
6007 balance_case_nodes (&np->left, np);
6008 for (; np->right; np = np->right)
6009 np->right->parent = np;
6010 return;
6013 /* If there are just three nodes, split at the middle one. */
6014 else if (i == 3)
6015 npp = &(*npp)->right;
6016 else
6018 /* Find the place in the list that bisects the list's total cost,
6019 where ranges count as 2.
6020 Here I gets half the total cost. */
6021 i = (i + ranges + 1) / 2;
6022 while (1)
6024 /* Skip nodes while their cost does not reach that amount. */
6025 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
6026 i--;
6027 i--;
6028 if (i <= 0)
6029 break;
6030 npp = &(*npp)->right;
6033 *head = np = *npp;
6034 *npp = 0;
6035 np->parent = parent;
6036 np->left = left;
6038 /* Optimize each of the two split parts. */
6039 balance_case_nodes (&np->left, np);
6040 balance_case_nodes (&np->right, np);
6042 else
6044 /* Else leave this branch as one level,
6045 but fill in `parent' fields. */
6046 np = *head;
6047 np->parent = parent;
6048 for (; np->right; np = np->right)
6049 np->right->parent = np;
6054 /* Search the parent sections of the case node tree
6055 to see if a test for the lower bound of NODE would be redundant.
6056 INDEX_TYPE is the type of the index expression.
6058 The instructions to generate the case decision tree are
6059 output in the same order as nodes are processed so it is
6060 known that if a parent node checks the range of the current
6061 node minus one that the current node is bounded at its lower
6062 span. Thus the test would be redundant. */
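/* As a hypothetical example, using the GNU case-range extension:

      case 5 ... 9:   ...   (a parent node, with high == 9)
      case 10 ... 20: ...   (this node, with low == 10)

   Once the parent has sent every value <= 9 elsewhere, control can only
   reach this node with a value >= 10, so the lower-bound test against
   10 is redundant.  */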
6064 static int
6065 node_has_low_bound (node, index_type)
6066 case_node_ptr node;
6067 tree index_type;
6069 tree low_minus_one;
6070 case_node_ptr pnode;
6072 /* If the lower bound of this node is the lowest value in the index type,
6073 we need not test it. */
6075 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
6076 return 1;
6078 /* If this node has a left branch, the value at the left must be less
6079 than that at this node, so it cannot be bounded at the bottom and
6080 we need not bother testing any further. */
6082 if (node->left)
6083 return 0;
6085 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
6086 node->low, integer_one_node));
6088 /* If the subtraction above overflowed, we can't verify anything.
6089 Otherwise, look for a parent that tests our value - 1. */
6091 if (! tree_int_cst_lt (low_minus_one, node->low))
6092 return 0;
6094 for (pnode = node->parent; pnode; pnode = pnode->parent)
6095 if (tree_int_cst_equal (low_minus_one, pnode->high))
6096 return 1;
6098 return 0;
6101 /* Search the parent sections of the case node tree
6102 to see if a test for the upper bound of NODE would be redundant.
6103 INDEX_TYPE is the type of the index expression.
6105 The instructions to generate the case decision tree are
6106 output in the same order as nodes are processed so it is
6107 known that if a parent node checks the range of the current
6108 node plus one that the current node is bounded at its upper
6109 span. Thus the test would be redundant. */
6111 static int
6112 node_has_high_bound (node, index_type)
6113 case_node_ptr node;
6114 tree index_type;
6116 tree high_plus_one;
6117 case_node_ptr pnode;
6119 /* If there is no upper bound, obviously no test is needed. */
6121 if (TYPE_MAX_VALUE (index_type) == NULL)
6122 return 1;
6124 /* If the upper bound of this node is the highest value in the type
6125 of the index expression, we need not test against it. */
6127 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
6128 return 1;
6130 /* If this node has a right branch, the value at the right must be greater
6131 than that at this node, so it cannot be bounded at the top and
6132 we need not bother testing any further. */
6134 if (node->right)
6135 return 0;
6137 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
6138 node->high, integer_one_node));
6140 /* If the addition above overflowed, we can't verify anything.
6141 Otherwise, look for a parent that tests our value + 1. */
6143 if (! tree_int_cst_lt (node->high, high_plus_one))
6144 return 0;
6146 for (pnode = node->parent; pnode; pnode = pnode->parent)
6147 if (tree_int_cst_equal (high_plus_one, pnode->low))
6148 return 1;
6150 return 0;
6153 /* Search the parent sections of the
6154 case node tree to see if both tests for the upper and lower
6155 bounds of NODE would be redundant. */
6157 static int
6158 node_is_bounded (node, index_type)
6159 case_node_ptr node;
6160 tree index_type;
6162 return (node_has_low_bound (node, index_type)
6163 && node_has_high_bound (node, index_type));
6166 /* Emit an unconditional jump to LABEL unless it would be dead code. */
6168 static void
6169 emit_jump_if_reachable (label)
6170 rtx label;
6172 if (GET_CODE (get_last_insn ()) != BARRIER)
6173 emit_jump (label);
6176 /* Emit step-by-step code to select a case for the value of INDEX.
6177 The thus generated decision tree follows the form of the
6178 case-node binary tree NODE, whose nodes represent test conditions.
6179 INDEX_TYPE is the type of the index of the switch.
6181 Care is taken to prune redundant tests from the decision tree
6182 by detecting any boundary conditions already checked by
6183 emitted rtx. (See node_has_high_bound, node_has_low_bound
6184 and node_is_bounded, above.)
6186 Where the test conditions can be shown to be redundant we emit
6187 an unconditional jump to the target code. As a further
6188 optimization, the subordinates of a tree node are examined to
6189 check for bounded nodes. In this case conditional and/or
6190 unconditional jumps as a result of the boundary check for the
6191 current node are arranged to target the subordinate's associated
6192 code directly for out-of-bound conditions on the current node.
6194 We can assume that when control reaches the code generated here,
6195 the index value has already been compared with the parents
6196 of this node, and determined to be on the same side of each parent
6197 as this node is. Thus, if this node tests for the value 51,
6198 and a parent tested for 52, we don't need to consider
6199 the possibility of a value greater than 51. If another parent
6200 tests for the value 50, then this node need not test anything. */
6202 static void
6203 emit_case_nodes (index, node, default_label, index_type)
6204 rtx index;
6205 case_node_ptr node;
6206 rtx default_label;
6207 tree index_type;
6209 /* If INDEX has an unsigned type, we must make unsigned branches. */
6210 int unsignedp = TREE_UNSIGNED (index_type);
6211 enum machine_mode mode = GET_MODE (index);
6212 enum machine_mode imode = TYPE_MODE (index_type);
6214 /* See if our parents have already tested everything for us.
6215 If they have, emit an unconditional jump for this node. */
6216 if (node_is_bounded (node, index_type))
6217 emit_jump (label_rtx (node->code_label));
6219 else if (tree_int_cst_equal (node->low, node->high))
6221 /* Node is single valued. First see if the index expression matches
6222 this node and then check our children, if any. */
6224 do_jump_if_equal (index,
6225 convert_modes (mode, imode,
6226 expand_expr (node->low, NULL_RTX,
6227 VOIDmode, 0),
6228 unsignedp),
6229 label_rtx (node->code_label), unsignedp);
6231 if (node->right != 0 && node->left != 0)
6233 /* This node has children on both sides.
6234 Dispatch to one side or the other
6235 by comparing the index value with this node's value.
6236 If one subtree is bounded, check that one first,
6237 so we can avoid real branches in the tree. */
6239 if (node_is_bounded (node->right, index_type))
6241 emit_cmp_and_jump_insns (index,
6242 convert_modes
6243 (mode, imode,
6244 expand_expr (node->high, NULL_RTX,
6245 VOIDmode, 0),
6246 unsignedp),
6247 GT, NULL_RTX, mode, unsignedp,
6248 label_rtx (node->right->code_label));
6249 emit_case_nodes (index, node->left, default_label, index_type);
6252 else if (node_is_bounded (node->left, index_type))
6254 emit_cmp_and_jump_insns (index,
6255 convert_modes
6256 (mode, imode,
6257 expand_expr (node->high, NULL_RTX,
6258 VOIDmode, 0),
6259 unsignedp),
6260 LT, NULL_RTX, mode, unsignedp,
6261 label_rtx (node->left->code_label));
6262 emit_case_nodes (index, node->right, default_label, index_type);
6265 else
6267 /* Neither node is bounded. First distinguish the two sides;
6268 then emit the code for one side at a time. */
6270 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6272 /* See if the value is on the right. */
6273 emit_cmp_and_jump_insns (index,
6274 convert_modes
6275 (mode, imode,
6276 expand_expr (node->high, NULL_RTX,
6277 VOIDmode, 0),
6278 unsignedp),
6279 GT, NULL_RTX, mode, unsignedp,
6280 label_rtx (test_label));
6282 /* Value must be on the left.
6283 Handle the left-hand subtree. */
6284 emit_case_nodes (index, node->left, default_label, index_type);
6285 /* If left-hand subtree does nothing,
6286 go to default. */
6287 emit_jump_if_reachable (default_label);
6289 /* Code branches here for the right-hand subtree. */
6290 expand_label (test_label);
6291 emit_case_nodes (index, node->right, default_label, index_type);
6295 else if (node->right != 0 && node->left == 0)
6297 /* Here we have a right child but no left so we issue a conditional
6298 branch to default and process the right child.
6300 Omit the conditional branch to default if the right child is a
6301 single-valued leaf; it costs too much space to save so little time. */
6303 if (node->right->right || node->right->left
6304 || !tree_int_cst_equal (node->right->low, node->right->high))
6306 if (!node_has_low_bound (node, index_type))
6308 emit_cmp_and_jump_insns (index,
6309 convert_modes
6310 (mode, imode,
6311 expand_expr (node->high, NULL_RTX,
6312 VOIDmode, 0),
6313 unsignedp),
6314 LT, NULL_RTX, mode, unsignedp,
6315 default_label);
6318 emit_case_nodes (index, node->right, default_label, index_type);
6320 else
6321 /* We cannot process node->right normally
6322 since we haven't ruled out the numbers less than
6323 this node's value. So handle node->right explicitly. */
6324 do_jump_if_equal (index,
6325 convert_modes
6326 (mode, imode,
6327 expand_expr (node->right->low, NULL_RTX,
6328 VOIDmode, 0),
6329 unsignedp),
6330 label_rtx (node->right->code_label), unsignedp);
6333 else if (node->right == 0 && node->left != 0)
6335 /* Just one subtree, on the left. */
6336 if (node->left->left || node->left->right
6337 || !tree_int_cst_equal (node->left->low, node->left->high))
6339 if (!node_has_high_bound (node, index_type))
6341 emit_cmp_and_jump_insns (index,
6342 convert_modes
6343 (mode, imode,
6344 expand_expr (node->high, NULL_RTX,
6345 VOIDmode, 0),
6346 unsignedp),
6347 GT, NULL_RTX, mode, unsignedp,
6348 default_label);
6351 emit_case_nodes (index, node->left, default_label, index_type);
6353 else
6354 /* We cannot process node->left normally
6355 since we haven't ruled out the numbers greater than
6356 this node's value. So handle node->left explicitly. */
6357 do_jump_if_equal (index,
6358 convert_modes
6359 (mode, imode,
6360 expand_expr (node->left->low, NULL_RTX,
6361 VOIDmode, 0),
6362 unsignedp),
6363 label_rtx (node->left->code_label), unsignedp);
6366 else
6368 /* Node is a range. These cases are very similar to those for a single
6369 value, except that we do not start by testing whether this node
6370 is the one to branch to. */
6372 if (node->right != 0 && node->left != 0)
6374 /* Node has subtrees on both sides.
6375 If the right-hand subtree is bounded,
6376 test for it first, since we can go straight there.
6377 Otherwise, we need to make a branch in the control structure,
6378 then handle the two subtrees. */
6379 tree test_label = 0;
6381 if (node_is_bounded (node->right, index_type))
6382 /* Right hand node is fully bounded so we can eliminate any
6383 testing and branch directly to the target code. */
6384 emit_cmp_and_jump_insns (index,
6385 convert_modes
6386 (mode, imode,
6387 expand_expr (node->high, NULL_RTX,
6388 VOIDmode, 0),
6389 unsignedp),
6390 GT, NULL_RTX, mode, unsignedp,
6391 label_rtx (node->right->code_label));
6392 else
6394 /* Right hand node requires testing.
6395 Branch to a label where we will handle it later. */
6397 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6398 emit_cmp_and_jump_insns (index,
6399 convert_modes
6400 (mode, imode,
6401 expand_expr (node->high, NULL_RTX,
6402 VOIDmode, 0),
6403 unsignedp),
6404 GT, NULL_RTX, mode, unsignedp,
6405 label_rtx (test_label));
6408 /* Value belongs to this node or to the left-hand subtree. */
6410 emit_cmp_and_jump_insns (index,
6411 convert_modes
6412 (mode, imode,
6413 expand_expr (node->low, NULL_RTX,
6414 VOIDmode, 0),
6415 unsignedp),
6416 GE, NULL_RTX, mode, unsignedp,
6417 label_rtx (node->code_label));
6419 /* Handle the left-hand subtree. */
6420 emit_case_nodes (index, node->left, default_label, index_type);
6422 /* If right node had to be handled later, do that now. */
6424 if (test_label)
6426 /* If the left-hand subtree fell through,
6427 don't let it fall into the right-hand subtree. */
6428 emit_jump_if_reachable (default_label);
6430 expand_label (test_label);
6431 emit_case_nodes (index, node->right, default_label, index_type);
6435 else if (node->right != 0 && node->left == 0)
6437 /* Deal with values to the left of this node,
6438 if they are possible. */
6439 if (!node_has_low_bound (node, index_type))
6441 emit_cmp_and_jump_insns (index,
6442 convert_modes
6443 (mode, imode,
6444 expand_expr (node->low, NULL_RTX,
6445 VOIDmode, 0),
6446 unsignedp),
6447 LT, NULL_RTX, mode, unsignedp,
6448 default_label);
6451 /* Value belongs to this node or to the right-hand subtree. */
6453 emit_cmp_and_jump_insns (index,
6454 convert_modes
6455 (mode, imode,
6456 expand_expr (node->high, NULL_RTX,
6457 VOIDmode, 0),
6458 unsignedp),
6459 LE, NULL_RTX, mode, unsignedp,
6460 label_rtx (node->code_label));
6462 emit_case_nodes (index, node->right, default_label, index_type);
6465 else if (node->right == 0 && node->left != 0)
6467 /* Deal with values to the right of this node,
6468 if they are possible. */
6469 if (!node_has_high_bound (node, index_type))
6471 emit_cmp_and_jump_insns (index,
6472 convert_modes
6473 (mode, imode,
6474 expand_expr (node->high, NULL_RTX,
6475 VOIDmode, 0),
6476 unsignedp),
6477 GT, NULL_RTX, mode, unsignedp,
6478 default_label);
6481 /* Value belongs to this node or to the left-hand subtree. */
6483 emit_cmp_and_jump_insns (index,
6484 convert_modes
6485 (mode, imode,
6486 expand_expr (node->low, NULL_RTX,
6487 VOIDmode, 0),
6488 unsignedp),
6489 GE, NULL_RTX, mode, unsignedp,
6490 label_rtx (node->code_label));
6492 emit_case_nodes (index, node->left, default_label, index_type);
6495 else
6497 /* Node has no children so we check low and high bounds to remove
6498 redundant tests. Only one of the bounds can exist,
6499 since otherwise this node is bounded--a case tested already. */
6500 int high_bound = node_has_high_bound (node, index_type);
6501 int low_bound = node_has_low_bound (node, index_type);
6503 if (!high_bound && low_bound)
6505 emit_cmp_and_jump_insns (index,
6506 convert_modes
6507 (mode, imode,
6508 expand_expr (node->high, NULL_RTX,
6509 VOIDmode, 0),
6510 unsignedp),
6511 GT, NULL_RTX, mode, unsignedp,
6512 default_label);
6515 else if (!low_bound && high_bound)
6517 emit_cmp_and_jump_insns (index,
6518 convert_modes
6519 (mode, imode,
6520 expand_expr (node->low, NULL_RTX,
6521 VOIDmode, 0),
6522 unsignedp),
6523 LT, NULL_RTX, mode, unsignedp,
6524 default_label);
6526 else if (!low_bound && !high_bound)
6528 /* Widen LOW and HIGH to the same width as INDEX. */
6529 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
6530 tree low = build1 (CONVERT_EXPR, type, node->low);
6531 tree high = build1 (CONVERT_EXPR, type, node->high);
6532 rtx low_rtx, new_index, new_bound;
6534 /* Instead of doing two branches, emit one unsigned branch for
6535 (index-low) > (high-low). */
6536 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
6537 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
6538 NULL_RTX, unsignedp,
6539 OPTAB_WIDEN);
6540 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
6541 high, low)),
6542 NULL_RTX, mode, 0);
6544 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
6545 mode, 1, default_label);
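   /* For illustration (hypothetical bounds): for "case 10 ... 20" with
      neither bound known, instead of

	 if (index < 10) goto default_label;
	 if (index > 20) goto default_label;
	 goto case_label;

      the code above emits the single unsigned test

	 if ((unsigned) (index - 10) > 20 - 10)
	   goto default_label;
	 goto case_label;

      since subtracting 10 wraps any smaller value around to a large
      unsigned number.  */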
6548 emit_jump (label_rtx (node->code_label));
6553 #include "gt-stmt.h"