/* Expands front end tree to back end RTL for GCC
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */

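/* As an illustrative sketch (not itself part of this file): to expand
   `if (cond) stmt;' a front end makes roughly this call sequence,
   with the details simplified:

       expand_start_cond (cond, 0);
       ... expand stmt ...
       expand_end_cond ();  */
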
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "langhooks.h"
#include "predict.h"
#include "optabs.h"
#include "target.h"
#include "regs.h"

/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

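/* For example (a sketch, using the GNU case-range extension):

       switch (i) { case 1: ... case 4 ... 6: ... default: ... }

   produces one case node with low == high == 1 and another with
   low == 4 and high == 6; the default label is not a case node but
   is recorded separately in the enclosing case construct.  */
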
struct case_node GTY(())
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]

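/* So COST_TABLE (-1) refers to cost_table_[0] and COST_TABLE (127) to
   cost_table_[128]; the valid argument range is -1 through 127.  */
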
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

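/* For instance, while the body of the `if' in

       while (...) { if (...) { ... } }

   is being expanded, nesting_stack holds (innermost first) entries for
   the conditional, the loop body's binding contour, and the loop,
   whereas cond_stack and loop_stack each hold only their own entry.
   (A sketch; which BLOCK_NESTING entries exist depends on the front
   end.)  */
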
struct nesting GTY(())
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  enum nesting_desc {
    COND_NESTING,
    LOOP_NESTING,
    BLOCK_NESTING,
    CASE_NESTING
  } desc;
  union nesting_u
    {
      /* For conds (if-then and if-then-else statements).  */
      struct nesting_cond
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} GTY ((tag ("COND_NESTING"))) cond;
      /* For loops.  */
      struct nesting_loop
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} GTY ((tag ("LOOP_NESTING"))) loop;
      /* For variable binding contours.  */
      struct nesting_block
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block; if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	} GTY ((tag ("BLOCK_NESTING"))) block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct nesting_case
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} GTY ((tag ("CASE_NESTING"))) case_stmt;
    } GTY ((desc ("%1.desc"))) data;
};

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() ggc_alloc (sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)						\
do { struct nesting *target = STACK;				\
     struct nesting *this;					\
     do { this = nesting_stack;					\
	  if (loop_stack == this)				\
	    loop_stack = loop_stack->next;			\
	  if (cond_stack == this)				\
	    cond_stack = cond_stack->next;			\
	  if (block_stack == this)				\
	    block_stack = block_stack->next;			\
	  if (stack_block_stack == this)			\
	    stack_block_stack = stack_block_stack->next;	\
	  if (case_stack == this)				\
	    case_stack = case_stack->next;			\
	  nesting_depth = nesting_stack->depth - 1;		\
	  nesting_stack = this->all; }				\
     while (this != target); } while (0)

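/* Typical use, as a sketch: the `expand_end_WHATEVER' function for a
   construct pops its own entry, e.g. POPSTACK (loop_stack) at the end
   of a loop, which also discards entries for any constructs left
   unclosed inside it.  */
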
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

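/* For example (a sketch):

       {
	 int vla[n];		<- contour that changes the stack level
	 goto lab;		<- LAB not seen yet; a fixup is recorded
       }
       lab: ;			<- once LAB is defined, the fixup emits
				   the stack restore before the jump  */
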
struct goto_fixup GTY(())
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain GTY(())
{
  /* Points to the following label_chain.  */
  struct label_chain *next;
  tree label;
};

struct stmt_status GTY(())
{
  /* Chain of all pending binding contours.  */
  struct nesting * x_block_stack;

  /* If any new stacks are added here, add them to POPSTACKS too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting * x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting * x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting * x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting * x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting * x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;
  rtx x_last_expr_alt_rtl;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Location of last line-number note, whether we actually
     emitted it or not.  */
  location_t x_emit_locus;

  struct goto_fixup *x_goto_fixup_chain;
};

#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define loop_stack (cfun->stmt->x_loop_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
#define last_expr_alt_rtl (cfun->stmt->x_last_expr_alt_rtl)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_locus (cfun->stmt->x_emit_locus)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)

/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

static int n_occurrences (int, const char *);
static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
static void expand_goto_internal (tree, rtx, rtx);
static int expand_fixup (tree, rtx, rtx);
static rtx expand_nl_handler_label (rtx, rtx);
static void expand_nl_goto_receiver (void);
static void expand_nl_goto_receivers (struct nesting *);
static void fixup_gotos (struct nesting *, rtx, tree, rtx, int);
static bool check_operand_nalternatives (tree, tree);
static bool check_unique_operand_names (tree, tree);
static char *resolve_operand_name_1 (char *, tree, tree);
static void expand_null_return_1 (rtx);
static enum br_predictor return_prediction (rtx);
static rtx shift_return_value (rtx);
static void expand_value_return (rtx);
static int tail_recursion_args (tree, tree);
static void expand_cleanups (tree, int, int);
static void check_seenlabel (void);
static void do_jump_if_equal (rtx, rtx, rtx, int);
static int estimate_case_costs (case_node_ptr);
static bool same_case_target_p (rtx, rtx);
static void strip_default_case_nodes (case_node_ptr *, rtx);
static bool lshift_cheap_p (void);
static int case_bit_test_cmp (const void *, const void *);
static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
static void group_case_nodes (case_node_ptr);
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
static void emit_jump_if_reachable (rtx);
static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
static struct case_node *case_tree2list (case_node *, case_node *);

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt_for_function (void)
{
  cfun->stmt = ggc_alloc_cleared (sizeof (struct stmt_status));
}

/* Record the current file and line.  Called from emit_line_note.  */

void
set_file_and_line_for_stmt (location_t location)
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    emit_locus = location;
}

/* Emit a no-op instruction.  */

void
emit_nop (void)
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}

/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (tree label)
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (!DECL_RTL_SET_P (label))
    SET_DECL_RTL (label, gen_label_rtx ());

  return DECL_RTL (label);
}

/* As above, but also put it on the forced-reference list of the
   function that contains it.  */
rtx
force_label_rtx (tree label)
{
  rtx ref = label_rtx (label);
  tree function = decl_function_context (label);
  struct function *p;

  if (!function)
    abort ();

  if (function != current_function_decl
      && function != inline_function_decl)
    p = find_function_data (function);
  else
    p = cfun;

  p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
						p->expr->x_forced_labels);
  return ref;
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (rtx label)
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (tree exp)
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

  x = convert_memory_address (Pmode, x);

  emit_queue ();

  if (! cfun->computed_goto_common_label)
    {
      cfun->computed_goto_common_reg = copy_to_mode_reg (Pmode, x);
      cfun->computed_goto_common_label = gen_label_rtx ();
      emit_label (cfun->computed_goto_common_label);

      do_pending_stack_adjust ();
      emit_indirect_jump (cfun->computed_goto_common_reg);

      current_function_has_computed_jump = 1;
    }
  else
    {
      emit_move_insn (cfun->computed_goto_common_reg, x);
      emit_jump (cfun->computed_goto_common_label);
    }
}

/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (tree label)
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = ggc_alloc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (tree label)
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (tree label)
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx handler_slot, static_chain, save_area, insn;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->x_nonlocal_goto_handler_slots;
      for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
	   link = TREE_CHAIN (link))
	handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

      static_chain = copy_to_reg (lookup_static_chain (label));

      /* Get addr of containing function's current nonlocal goto handler,
	 which will do any cleanups and then jump to the label.  */
      handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
					       virtual_stack_vars_rtx,
					       static_chain));

      /* Get addr of containing function's nonlocal save area.  */
      save_area = p->x_nonlocal_goto_stack_level;
      if (save_area)
	save_area = replace_rtx (copy_rtx (save_area),
				 virtual_stack_vars_rtx, static_chain);

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
				      save_area, label_ref));
      else
#endif
	{
	  emit_insn (gen_rtx_CLOBBER (VOIDmode,
				      gen_rtx_MEM (BLKmode,
						   gen_rtx_SCRATCH (VOIDmode))));
	  emit_insn (gen_rtx_CLOBBER (VOIDmode,
				      gen_rtx_MEM (BLKmode,
						   hard_frame_pointer_rtx)));

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, static_chain);
	  emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);

	  /* USE of hard_frame_pointer_rtx added for consistency;
	     not clear if really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (handler_slot);
	}

      /* Search backwards to the jump insn and mark it as a
	 non-local goto.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	{
	  if (GET_CODE (insn) == JUMP_INSN)
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
						  const0_rtx, REG_NOTES (insn));
	      break;
	    }
	  else if (GET_CODE (insn) == CALL_INSN)
	    break;
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (tree body, rtx label, rtx last_insn)
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();

	  /* Don't do this adjust if it's to the end label and this function
	     is to return with a depressed stack pointer.  */
	  if (label == return_label
	      && (((TREE_CODE (TREE_TYPE (current_function_decl))
		    == FUNCTION_TYPE)
		   && (TYPE_RETURNS_STACK_DEPRESSED
		       (TREE_TYPE (current_function_decl))))))
	    ;
	  else
	    emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}

/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree tree_label, rtx rtl_label, rtx last_insn)
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup = ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes (including expand_end_loop)
	 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
	 as a placeholder.  */

      {
	rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();
	rtx start;
	rtx end;
	tree block;

	block = make_node (BLOCK);
	TREE_USED (block) = 1;

	if (!cfun->x_whole_function_mode_p)
	  (*lang_hooks.decls.insert_block) (block);
	else
	  {
	    BLOCK_CHAIN (block)
	      = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
	    BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
	      = block;
	  }

	start_sequence ();
	start = emit_note (NOTE_INSN_BLOCK_BEG);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (start) = block;
	fixup->before_jump = emit_note (NOTE_INSN_DELETED);
	end = emit_note (NOTE_INSN_BLOCK_END);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (end) = block;
	fixup->context = block;
	end_sequence ();
	emit_insn_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (rtx first_insn)
{
  fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN positive means report error if there is a jump into this
   contour from before the beginning of the contour.  This is also done if
   STACK_LEVEL is nonzero unless DONT_JUMP_IN is negative.  */

static void
fixup_gotos (struct nesting *thisblock, rtx stack_level,
	     tree cleanup_list, rtx first_insn, int dont_jump_in)
{
  struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  rtx cleanup_insns;

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.   This code used to use
	     the first non-label insn after f->target_rtl, but that's
	     wrong since such can be added, by things like put_var_into_stack
	     and have INSN_UIDs that are out of the range of the block.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in > 0 || (dont_jump_in == 0 && stack_level)
		  || cleanup_list)
	      && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error ("%Jlabel '%D' used before containing binding contour",
		     f->target, f->target);
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  (*lang_hooks.decls.pushlevel) (0);
	  (*lang_hooks.decls.set_block) (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level
	      && ! (f->target_rtl == return_label
		    && ((TREE_CODE (TREE_TYPE (current_function_decl))
			 == FUNCTION_TYPE)
			&& (TYPE_RETURNS_STACK_DEPRESSED
			    (TREE_TYPE (current_function_decl))))))
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  (*lang_hooks.decls.poplevel) (1, 0, 0);

	  end_sequence ();
	  emit_insn_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      (*lang_hooks.decls.pushlevel) (0);
	      (*lang_hooks.decls.set_block) (f->context);
	      expand_cleanups (TREE_VALUE (lists), 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      (*lang_hooks.decls.poplevel) (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insn_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}

/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}

/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

void
expand_asm (tree string, int vol)
{
  rtx body;

  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));

  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);

  clear_last_expr ();
}

/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */

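/* For example (behavior implied by the code below): given "+r" for
   operand 0, this returns with *CONSTRAINT_P == "=r", *IS_INOUT == true
   and *ALLOWS_REG == true; "r=" is likewise rewritten to "=r", with a
   warning that the `=' was not at the beginning.  */
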
bool
parse_output_constraint (const char **constraint_p, int operand_num,
			 int ninputs, int noutputs, bool *allows_mem,
			 bool *allows_reg, bool *is_inout)
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks `='");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
	warning ("output constraint `%c' for operand %d is not at the beginning",
		 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
	 it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string.  */
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
    switch (*p)
      {
      case '+':
      case '=':
	error ("operand constraint contains incorrectly positioned '+' or '='");
	return false;

      case '%':
	if (operand_num + 1 == ninputs + noutputs)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
	error ("matching constraint not valid in output operand");
	return false;

      case '<':  case '>':
	/* ??? Before flow, auto inc/dec insns are not supposed to exist,
	   excepting those that expand_call created.  So match memory
	   and hope.  */
	*allows_mem = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      default:
	if (!ISALPHA (*p))
	  break;
	if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  if (*is_inout && !*allows_reg)
    warning ("read-write constraint does not allow a register");

  return true;
}

/* Similar, but for input constraints.  */

bool
parse_input_constraint (const char **constraint_p, int input_num,
			int ninputs, int noutputs, int ninout,
			const char * const * constraints,
			bool *allows_mem, bool *allows_reg)
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;
  bool saw_match = false;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
	if (constraint == orig_constraint)
	  {
	    error ("input operand constraint contains `%c'", constraint[j]);
	    return false;
	  }
	break;

      case '%':
	if (constraint == orig_constraint
	    && input_num + 1 == ninputs - ninout)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

	/* Whether or not a numeric constraint allows a register is
	   decided by the matching constraint, and so there is no need
	   to do anything special with them.  We must handle them in
	   the default case, so that we don't unnecessarily force
	   operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
	{
	  char *end;
	  unsigned long match;

	  saw_match = true;

	  match = strtoul (constraint + j, &end, 10);
	  if (match >= (unsigned long) noutputs)
	    {
	      error ("matching constraint references invalid operand number");
	      return false;
	    }

	  /* Try and find the real constraint for this dup.  Only do this
	     if the matching constraint is the only alternative.  */
	  if (*end == '\0'
	      && (j == 0 || (j == 1 && constraint[0] == '%')))
	    {
	      constraint = constraints[match];
	      *constraint_p = constraint;
	      c_len = strlen (constraint);
	      j = 0;
	      /* ??? At the end of the loop, we will skip the first part of
		 the matched constraint.  This assumes not only that the
		 other constraint is an output constraint, but also that
		 the '=' or '+' come first.  */
	      break;
	    }
	  else
	    j = end - constraint;
	  /* Anticipate increment at end of loop.  */
	  j--;
	}
	/* Fall through.  */

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      default:
	if (! ISALPHA (constraint[j]))
	  {
	    error ("invalid punctuation `%c' in constraint", constraint[j]);
	    return false;
	  }
	if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
	    != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  if (saw_match && !*allows_reg)
    warning ("matching constraint does not allow a register");

  return true;
}

/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in DECL.  Emit error and return TRUE for error,
   FALSE for ok.  */

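/* For example (an illustrative, i386-flavored sketch):

       register int x asm ("ebx");
       asm ("..." : : "r" (x) : "ebx");

   is diagnosed here, because operand X lives in the very hard register
   the asm claims to clobber.  */
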
static bool
decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
      && DECL_REGISTER (decl)
      && REG_P (DECL_RTL (decl))
      && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
    {
      rtx reg = DECL_RTL (decl);
      unsigned int regno;

      for (regno = REGNO (reg);
	   regno < (REGNO (reg)
		    + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
	   regno++)
	if (TEST_HARD_REG_BIT (clobbered_regs, regno))
	  {
	    error ("asm-specifier for variable `%s' conflicts with asm clobber list",
		   IDENTIFIER_POINTER (DECL_NAME (decl)));

	    /* Reset registerness to stop multiple errors emitted for a
	       single variable.  */
	    DECL_REGISTER (decl) = 0;
	    return true;
	  }
    }

  return false;
}

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

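/* For example (a sketch): for the C statement

       asm ("add %1,%0" : "=r" (sum) : "r" (x), "0" (sum));

   OUTPUTS is a one-element list whose constraint string is "=r", and
   INPUTS has two elements, the second using the matching constraint
   "0" to tie it to output operand 0.  */
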
1449 void
1450 expand_asm_operands (tree string, tree outputs, tree inputs,
1451 tree clobbers, int vol, location_t locus)
1453 rtvec argvec, constraintvec;
1454 rtx body;
1455 int ninputs = list_length (inputs);
1456 int noutputs = list_length (outputs);
1457 int ninout;
1458 int nclobbers;
1459 HARD_REG_SET clobbered_regs;
1460 int clobber_conflict_found = 0;
1461 tree tail;
1462 tree t;
1463 int i;
1464 /* Vector of RTX's of evaluated output operands. */
1465 rtx *output_rtx = alloca (noutputs * sizeof (rtx));
1466 int *inout_opnum = alloca (noutputs * sizeof (int));
1467 rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
1468 enum machine_mode *inout_mode
1469 = alloca (noutputs * sizeof (enum machine_mode));
1470 const char **constraints
1471 = alloca ((noutputs + ninputs) * sizeof (const char *));
1472 int old_generating_concat_p = generating_concat_p;
1474 /* An ASM with no outputs needs to be treated as volatile, for now. */
1475 if (noutputs == 0)
1476 vol = 1;
1478 if (! check_operand_nalternatives (outputs, inputs))
1479 return;
1481 string = resolve_asm_operand_names (string, outputs, inputs);
1483 /* Collect constraints. */
1484 i = 0;
1485 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
1486 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1487 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
1488 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1490 #ifdef MD_ASM_CLOBBERS
1491 /* Sometimes we wish to automatically clobber registers across an asm.
1492 Case in point is when the i386 backend moved from cc0 to a hard reg --
1493 maintaining source-level compatibility means automatically clobbering
1494 the flags register. */
1495 MD_ASM_CLOBBERS (clobbers);
1496 #endif
1498 /* Count the number of meaningful clobbered registers, ignoring what
1499 we would ignore later. */
1500 nclobbers = 0;
1501 CLEAR_HARD_REG_SET (clobbered_regs);
1502 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1504 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1506 i = decode_reg_name (regname);
1507 if (i >= 0 || i == -4)
1508 ++nclobbers;
1509 else if (i == -2)
1510 error ("unknown register name `%s' in `asm'", regname);
1512 /* Mark clobbered registers. */
1513 if (i >= 0)
1515 /* Clobbering the PIC register is an error */
1516 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
1518 error ("PIC register `%s' clobbered in `asm'", regname);
1519 return;
1522 SET_HARD_REG_BIT (clobbered_regs, i);
1526 clear_last_expr ();
1528 /* First pass over inputs and outputs checks validity and sets
1529 mark_addressable if needed. */
1531 ninout = 0;
1532 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1534 tree val = TREE_VALUE (tail);
1535 tree type = TREE_TYPE (val);
1536 const char *constraint;
1537 bool is_inout;
1538 bool allows_reg;
1539 bool allows_mem;
1541 /* If there's an erroneous arg, emit no insn. */
1542 if (type == error_mark_node)
1543 return;
1545 /* Try to parse the output constraint. If that fails, there's
1546 no point in going further. */
1547 constraint = constraints[i];
1548 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
1549 &allows_mem, &allows_reg, &is_inout))
1550 return;
1552 if (! allows_reg
1553 && (allows_mem
1554 || is_inout
1555 || (DECL_P (val)
1556 && GET_CODE (DECL_RTL (val)) == REG
1557 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
1558 (*lang_hooks.mark_addressable) (val);
1560 if (is_inout)
1561 ninout++;
1564 ninputs += ninout;
1565 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1567 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1568 return;
1571 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
1573 bool allows_reg, allows_mem;
1574 const char *constraint;
1576 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
1577 would get VOIDmode and that could cause a crash in reload. */
1578 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1579 return;
1581 constraint = constraints[i + noutputs];
1582 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1583 constraints, &allows_mem, &allows_reg))
1584 return;
1586 if (! allows_reg && allows_mem)
1587 (*lang_hooks.mark_addressable) (TREE_VALUE (tail));
1590 /* Second pass evaluates arguments. */
1592 ninout = 0;
1593 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1595 tree val = TREE_VALUE (tail);
1596 tree type = TREE_TYPE (val);
1597 bool is_inout;
1598 bool allows_reg;
1599 bool allows_mem;
1600 rtx op;
1602 if (!parse_output_constraint (&constraints[i], i, ninputs,
1603 noutputs, &allows_mem, &allows_reg,
1604 &is_inout))
1605 abort ();
1607 /* If an output operand is not a decl or indirect ref and our constraint
1608 allows a register, make a temporary to act as an intermediate.
1609 Make the asm insn write into that, then our caller will copy it to
1610 the real output operand. Likewise for promoted variables. */
1612 generating_concat_p = 0;
1614 real_output_rtx[i] = NULL_RTX;
1615 if ((TREE_CODE (val) == INDIRECT_REF
1616 && allows_mem)
1617 || (DECL_P (val)
1618 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
1619 && ! (GET_CODE (DECL_RTL (val)) == REG
1620 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1621 || ! allows_reg
1622 || is_inout)
1624 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
1625 if (GET_CODE (op) == MEM)
1626 op = validize_mem (op);
1628 if (! allows_reg && GET_CODE (op) != MEM)
1629 error ("output number %d not directly addressable", i);
1630 if ((! allows_mem && GET_CODE (op) == MEM)
1631 || GET_CODE (op) == CONCAT)
1633 real_output_rtx[i] = protect_from_queue (op, 1);
1634 op = gen_reg_rtx (GET_MODE (op));
1635 if (is_inout)
1636 emit_move_insn (op, real_output_rtx[i]);
1639 else
1641 op = assign_temp (type, 0, 0, 1);
1642 op = validize_mem (op);
1643 TREE_VALUE (tail) = make_tree (type, op);
1645 output_rtx[i] = op;
1647 generating_concat_p = old_generating_concat_p;
1649 if (is_inout)
1651 inout_mode[ninout] = TYPE_MODE (type);
1652 inout_opnum[ninout++] = i;
1655 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1656 clobber_conflict_found = 1;
1659 /* Make vectors for the expression-rtx, constraint strings,
1660 and named operands. */
1662 argvec = rtvec_alloc (ninputs);
1663 constraintvec = rtvec_alloc (ninputs);
1665 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1666 : GET_MODE (output_rtx[0])),
1667 TREE_STRING_POINTER (string),
1668 empty_string, 0, argvec, constraintvec,
1669 locus.file, locus.line);
1671 MEM_VOLATILE_P (body) = vol;
1673 /* Eval the inputs and put them into ARGVEC.
1674 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1676 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1678 bool allows_reg, allows_mem;
1679 const char *constraint;
1680 tree val, type;
1681 rtx op;
1683 constraint = constraints[i + noutputs];
1684 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1685 constraints, &allows_mem, &allows_reg))
1686 abort ();
1688 generating_concat_p = 0;
1690 val = TREE_VALUE (tail);
1691 type = TREE_TYPE (val);
1692 op = expand_expr (val, NULL_RTX, VOIDmode,
1693 (allows_mem && !allows_reg
1694 ? EXPAND_MEMORY : EXPAND_NORMAL));
1696 /* Never pass a CONCAT to an ASM. */
1697 if (GET_CODE (op) == CONCAT)
1698 op = force_reg (GET_MODE (op), op);
1699 else if (GET_CODE (op) == MEM)
1700 op = validize_mem (op);
1702 if (asm_operand_ok (op, constraint) <= 0)
1704 if (allows_reg)
1705 op = force_reg (TYPE_MODE (type), op);
1706 else if (!allows_mem)
1707 warning ("asm operand %d probably doesn't match constraints",
1708 i + noutputs);
1709 else if (GET_CODE (op) == MEM)
1711 /* We won't recognize either volatile memory or memory
1712 with a queued address as available a memory_operand
1713 at this point. Ignore it: clearly this *is* a memory. */
1715 else
1717 warning ("use of memory input without lvalue in "
1718 "asm operand %d is deprecated", i + noutputs);
1720 if (CONSTANT_P (op))
1722 rtx mem = force_const_mem (TYPE_MODE (type), op);
1723 if (mem)
1724 op = validize_mem (mem);
1725 else
1726 op = force_reg (TYPE_MODE (type), op);
1728 if (GET_CODE (op) == REG
1729 || GET_CODE (op) == SUBREG
1730 || GET_CODE (op) == ADDRESSOF
1731 || GET_CODE (op) == CONCAT)
1733 tree qual_type = build_qualified_type (type,
1734 (TYPE_QUALS (type)
1735 | TYPE_QUAL_CONST));
1736 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1737 memloc = validize_mem (memloc);
1738 emit_move_insn (memloc, op);
1739 op = memloc;
1744 generating_concat_p = old_generating_concat_p;
1745 ASM_OPERANDS_INPUT (body, i) = op;
1747 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1748 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1750 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1751 clobber_conflict_found = 1;
1754 /* Protect all the operands from the queue now that they have all been
1755 evaluated. */
1757 generating_concat_p = 0;
1759 for (i = 0; i < ninputs - ninout; i++)
1760 ASM_OPERANDS_INPUT (body, i)
1761 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1763 for (i = 0; i < noutputs; i++)
1764 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1766 /* For in-out operands, copy output rtx to input rtx. */
1767 for (i = 0; i < ninout; i++)
1769 int j = inout_opnum[i];
1770 char buffer[16];
1772 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1773 = output_rtx[j];
1775 sprintf (buffer, "%d", j);
1776 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1777 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
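     /* Illustration (not from this file): in GNU C,

	  int x = 1;
	  asm ("incl %0" : "+r" (x));

	the "+" makes operand 0 in-out, so the loop above appends a
	hidden input that reuses output_rtx[0] and whose constraint is
	the digit string "0", matching the output operand.  */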
1780 generating_concat_p = old_generating_concat_p;
1782 /* Now, for each output, construct an rtx
1783 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1784 ARGVEC CONSTRAINTS OPNAMES))
1785 If there is more than one, put them inside a PARALLEL. */
1787 if (noutputs == 1 && nclobbers == 0)
1789 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1790 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1793 else if (noutputs == 0 && nclobbers == 0)
1795 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1796 emit_insn (body);
1799 else
1801 rtx obody = body;
1802 int num = noutputs;
1804 if (num == 0)
1805 num = 1;
1807 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1809 /* For each output operand, store a SET. */
1810 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1812 XVECEXP (body, 0, i)
1813 = gen_rtx_SET (VOIDmode,
1814 output_rtx[i],
1815 gen_rtx_ASM_OPERANDS
1816 (GET_MODE (output_rtx[i]),
1817 TREE_STRING_POINTER (string),
1818 constraints[i], i, argvec, constraintvec,
1819 locus.file, locus.line));
1821 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1824 /* If there are no outputs (but there are some clobbers)
1825 store the bare ASM_OPERANDS into the PARALLEL. */
1827 if (i == 0)
1828 XVECEXP (body, 0, i++) = obody;
1830 /* Store (clobber REG) for each clobbered register specified. */
1832 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1834 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1835 int j = decode_reg_name (regname);
1836 rtx clobbered_reg;
1838 if (j < 0)
1840 if (j == -3) /* `cc', which is not a register */
1841 continue;
1843 if (j == -4) /* `memory', don't cache memory across asm */
1845 XVECEXP (body, 0, i++)
1846 = gen_rtx_CLOBBER (VOIDmode,
1847 gen_rtx_MEM
1848 (BLKmode,
1849 gen_rtx_SCRATCH (VOIDmode)));
1850 continue;
1853 /* Ignore unknown register, error already signaled. */
1854 continue;
1857 /* Use QImode since that's guaranteed to clobber just one reg. */
1858 clobbered_reg = gen_rtx_REG (QImode, j);
1860 /* Sanity check for any overlap between clobbers and the inputs
1861 and outputs that hasn't been handled. Such overlap should have
1862 been detected and reported above. */
1863 if (!clobber_conflict_found)
1865 int opno;
1867 /* We test the old body (obody) contents to avoid tripping
1868 over the under-construction body. */
1869 for (opno = 0; opno < noutputs; opno++)
1870 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1871 internal_error ("asm clobber conflict with output operand");
1873 for (opno = 0; opno < ninputs - ninout; opno++)
1874 if (reg_overlap_mentioned_p (clobbered_reg,
1875 ASM_OPERANDS_INPUT (obody, opno)))
1876 internal_error ("asm clobber conflict with input operand");
1879 XVECEXP (body, 0, i++)
1880 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1883 emit_insn (body);
1886 /* For any outputs that needed reloading into registers, spill them
1887 back to where they belong. */
1888 for (i = 0; i < noutputs; ++i)
1889 if (real_output_rtx[i])
1890 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1892 free_temp_slots ();
1895 /* A subroutine of expand_asm_operands. Check that all operands have
1896 the same number of alternatives. Return true if so. */
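/* For example (a sketch): "=r,m" and "ri,g" each describe two
   alternatives, so

     asm ("..." : "=r,m" (out) : "ri,g" (in));

   passes this check, whereas pairing "=r,m" with a single-alternative
   input constraint such as "ri" would not.  */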
1898 static bool
1899 check_operand_nalternatives (tree outputs, tree inputs)
1901 if (outputs || inputs)
1903 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1904 int nalternatives
1905 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1906 tree next = inputs;
1908 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1910 error ("too many alternatives in `asm'");
1911 return false;
1914 tmp = outputs;
1915 while (tmp)
1917 const char *constraint
1918 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1920 if (n_occurrences (',', constraint) != nalternatives)
1922 error ("operand constraints for `asm' differ in number of alternatives");
1923 return false;
1926 if (TREE_CHAIN (tmp))
1927 tmp = TREE_CHAIN (tmp);
1928 else
1929 tmp = next, next = 0;
1933 return true;
1936 /* A subroutine of expand_asm_operands. Check that all operand names
1937 are unique. Return true if so. We rely on the fact that these names
1938 are identifiers, and so have been canonicalized by get_identifier,
1939 so all we need are pointer comparisons. */
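/* For instance (hypothetical), this rejects

     asm ("..." : [val] "=r" (a), [val] "=r" (b));

   with "duplicate asm operand name", while distinct names, or leaving
   operands unnamed, are fine.  */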
1941 static bool
1942 check_unique_operand_names (tree outputs, tree inputs)
1944 tree i, j;
1946 for (i = outputs; i ; i = TREE_CHAIN (i))
1948 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1949 if (! i_name)
1950 continue;
1952 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1953 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1954 goto failure;
1957 for (i = inputs; i ; i = TREE_CHAIN (i))
1959 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1960 if (! i_name)
1961 continue;
1963 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1964 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1965 goto failure;
1966 for (j = outputs; j ; j = TREE_CHAIN (j))
1967 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1968 goto failure;
1971 return true;
1973 failure:
1974 error ("duplicate asm operand name '%s'",
1975 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1976 return false;
1979 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1980 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1981 STRING and in the constraints to those numbers. */
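/* Sketch of the substitution (operand numbers count outputs first):

     asm ("add %[inc], %[acc]" : [acc] "+r" (sum) : [inc] "r" (delta));

   becomes the equivalent of

     asm ("add %1, %0" : "+r" (sum) : "r" (delta));  */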
1983 tree
1984 resolve_asm_operand_names (tree string, tree outputs, tree inputs)
1986 char *buffer;
1987 char *p;
1988 const char *c;
1989 tree t;
1991 check_unique_operand_names (outputs, inputs);
1993 /* Substitute [<name>] in input constraint strings. There should be no
1994 named operands in output constraints. */
1995 for (t = inputs; t ; t = TREE_CHAIN (t))
1997 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1998 if (strchr (c, '[') != NULL)
2000 p = buffer = xstrdup (c);
2001 while ((p = strchr (p, '[')) != NULL)
2002 p = resolve_operand_name_1 (p, outputs, inputs);
2003 TREE_VALUE (TREE_PURPOSE (t))
2004 = build_string (strlen (buffer), buffer);
2005 free (buffer);
2009 /* Now check for any needed substitutions in the template. */
2010 c = TREE_STRING_POINTER (string);
2011 while ((c = strchr (c, '%')) != NULL)
2013 if (c[1] == '[')
2014 break;
2015 else if (ISALPHA (c[1]) && c[2] == '[')
2016 break;
2017 else
2019 c += 1;
2020 continue;
2024 if (c)
2026 /* OK, we need to make a copy so we can perform the substitutions.
2027 Assume that we will not need extra space--we get to remove '['
2028 and ']', which means we cannot have a problem until we have more
2029 than 999 operands. */
2030 buffer = xstrdup (TREE_STRING_POINTER (string));
2031 p = buffer + (c - TREE_STRING_POINTER (string));
2033 while ((p = strchr (p, '%')) != NULL)
2035 if (p[1] == '[')
2036 p += 1;
2037 else if (ISALPHA (p[1]) && p[2] == '[')
2038 p += 2;
2039 else
2041 p += 1;
2042 continue;
2045 p = resolve_operand_name_1 (p, outputs, inputs);
2048 string = build_string (strlen (buffer), buffer);
2049 free (buffer);
2052 return string;
2055 /* A subroutine of resolve_operand_names. P points to the '[' for a
2056 potential named operand of the form [<name>]. In place, replace
2057 the name and brackets with a number. Return a pointer to the
2058 balance of the string after substitution. */
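/* E.g. (invented buffer contents): with P at "[acc], %2" and "acc"
   naming operand 0, the buffer is rewritten in place to "0, %2" and
   the returned pointer addresses the ','.  */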
2060 static char *
2061 resolve_operand_name_1 (char *p, tree outputs, tree inputs)
2063 char *q;
2064 int op;
2065 tree t;
2066 size_t len;
2068 /* Collect the operand name. */
2069 q = strchr (p, ']');
2070 if (!q)
2072 error ("missing close brace for named operand");
2073 return strchr (p, '\0');
2075 len = q - p - 1;
2077 /* Resolve the name to a number. */
2078 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2080 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2081 if (name)
2083 const char *c = TREE_STRING_POINTER (name);
2084 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2085 goto found;
2088 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2090 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2091 if (name)
2093 const char *c = TREE_STRING_POINTER (name);
2094 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2095 goto found;
2099 *q = '\0';
2100 error ("undefined named operand '%s'", p + 1);
2101 op = 0;
2102 found:
2104 /* Replace the name with the number. Unfortunately, not all libraries
2105 get the return value of sprintf correct, so search for the end of the
2106 generated string by hand. */
2107 sprintf (p, "%d", op);
2108 p = strchr (p, '\0');
2110 /* Verify the no extra buffer space assumption. */
2111 if (p > q)
2112 abort ();
2114 /* Shift the rest of the buffer down to fill the gap. */
2115 memmove (p, q + 1, strlen (q + 1) + 1);
2117 return p;
2120 /* Generate RTL to evaluate the expression EXP
2121 and remember it in case this is the VALUE in a ({... VALUE; }) constr.
2122 Provided just for backward-compatibility. expand_expr_stmt_value()
2123 should be used for new code. */
2125 void
2126 expand_expr_stmt (tree exp)
2128 expand_expr_stmt_value (exp, -1, 1);
2131 /* Generate RTL to evaluate the expression EXP. WANT_VALUE tells
2132 whether to (1) save the value of the expression, (0) discard it or
2133 (-1) use expr_stmts_for_value to tell. The use of -1 is
2134 deprecated, and retained only for backward compatibility. */
2136 void
2137 expand_expr_stmt_value (tree exp, int want_value, int maybe_last)
2139 rtx value;
2140 tree type;
2141 rtx alt_rtl = NULL;
2143 if (want_value == -1)
2144 want_value = expr_stmts_for_value != 0;
2146 /* If -Wunused-value, warn about statements with no side effects,
2147 except for an explicit cast to void (e.g. for assert()), and
2148 except for the last statement in ({...}) where they may be useful. */
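  /* Illustrative cases: "x + 1;" draws the warning; "(void) (x + 1);"
     does not; and in "({ ... x + 1; })" the final expression is exempt
     when its value is wanted.  */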
2149 if (! want_value
2150 && (expr_stmts_for_value == 0 || ! maybe_last)
2151 && exp != error_mark_node
2152 && warn_unused_value)
2154 if (TREE_SIDE_EFFECTS (exp))
2155 warn_if_unused_value (exp);
2156 else if (!VOID_TYPE_P (TREE_TYPE (exp)))
2157 warning ("%Hstatement with no effect", &emit_locus);
2160 /* If EXP is of function type and we are expanding statements for
2161 value, convert it to pointer-to-function. */
2162 if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2163 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2165 /* The call to `expand_expr' could cause last_expr_type and
2166 last_expr_value to get reset. Therefore, we set last_expr_value
2167 and last_expr_type *after* calling expand_expr. */
2168 value = expand_expr_real (exp, want_value ? NULL_RTX : const0_rtx,
2169 VOIDmode, 0, &alt_rtl);
2170 type = TREE_TYPE (exp);
2172 /* If all we do is reference a volatile value in memory,
2173 copy it to a register to be sure it is actually touched. */
2174 if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
2176 if (TYPE_MODE (type) == VOIDmode)
2178 else if (TYPE_MODE (type) != BLKmode)
2179 value = copy_to_reg (value);
2180 else
2182 rtx lab = gen_label_rtx ();
2184 /* Compare the value with itself to reference it. */
2185 emit_cmp_and_jump_insns (value, value, EQ,
2186 expand_expr (TYPE_SIZE (type),
2187 NULL_RTX, VOIDmode, 0),
2188 BLKmode, 0, lab);
2189 emit_label (lab);
2193 /* If this expression is part of a ({...}) and is in memory, we may have
2194 to preserve temporaries. */
2195 preserve_temp_slots (value);
2197 /* Free any temporaries used to evaluate this expression. Any temporary
2198 used as a result of this expression will already have been preserved
2199 above. */
2200 free_temp_slots ();
2202 if (want_value)
2204 last_expr_value = value;
2205 last_expr_alt_rtl = alt_rtl;
2206 last_expr_type = type;
2209 emit_queue ();
2212 /* Warn if EXP contains any computations whose results are not used.
2213 Return 1 if a warning is printed; 0 otherwise. */
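/* A few illustrative cases: "x + 1;" warns; "f ();" does not
   (CALL_EXPR); "(void) (x + 1);" does not (void type); "b && f ();"
   warns only when the second operand has no side effect.  */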
2215 int
2216 warn_if_unused_value (tree exp)
2218 if (TREE_USED (exp))
2219 return 0;
2221 /* Don't warn about void constructs. This includes casting to void,
2222 void function calls, and statement expressions with a final cast
2223 to void. */
2224 if (VOID_TYPE_P (TREE_TYPE (exp)))
2225 return 0;
2227 switch (TREE_CODE (exp))
2229 case PREINCREMENT_EXPR:
2230 case POSTINCREMENT_EXPR:
2231 case PREDECREMENT_EXPR:
2232 case POSTDECREMENT_EXPR:
2233 case MODIFY_EXPR:
2234 case INIT_EXPR:
2235 case TARGET_EXPR:
2236 case CALL_EXPR:
2237 case RTL_EXPR:
2238 case TRY_CATCH_EXPR:
2239 case WITH_CLEANUP_EXPR:
2240 case EXIT_EXPR:
2241 return 0;
2243 case BIND_EXPR:
2244 /* For a binding, warn if no side effect within it. */
2245 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2247 case SAVE_EXPR:
2248 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2250 case TRUTH_ORIF_EXPR:
2251 case TRUTH_ANDIF_EXPR:
2252 /* In && or ||, warn if 2nd operand has no side effect. */
2253 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2255 case COMPOUND_EXPR:
2256 if (TREE_NO_UNUSED_WARNING (exp))
2257 return 0;
2258 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2259 return 1;
2260 /* Let people do `(foo (), 0)' without a warning. */
2261 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2262 return 0;
2263 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2265 case NOP_EXPR:
2266 case CONVERT_EXPR:
2267 case NON_LVALUE_EXPR:
2268 /* Don't warn about conversions not explicit in the user's program. */
2269 if (TREE_NO_UNUSED_WARNING (exp))
2270 return 0;
2271 /* Assignment to a cast usually results in a cast of a modify.
2272 Don't complain about that. There can be an arbitrary number of
2273 casts before the modify, so we must loop until we find the first
2274 non-cast expression and then test to see if that is a modify. */
2276 tree tem = TREE_OPERAND (exp, 0);
2278 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2279 tem = TREE_OPERAND (tem, 0);
2281 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2282 || TREE_CODE (tem) == CALL_EXPR)
2283 return 0;
2285 goto maybe_warn;
2287 case INDIRECT_REF:
2288 /* Don't warn about automatic dereferencing of references, since
2289 the user cannot control it. */
2290 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2291 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2292 /* Fall through. */
2294 default:
2295 /* Referencing a volatile value is a side effect, so don't warn. */
2296 if ((DECL_P (exp)
2297 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2298 && TREE_THIS_VOLATILE (exp))
2299 return 0;
2301 /* If this is an expression which has no operands, there is no value
2302 to be unused. There are no such language-independent codes,
2303 but front ends may define such. */
2304 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2305 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2306 return 0;
2308 maybe_warn:
2309 /* If this is an expression with side effects, don't warn. */
2310 if (TREE_SIDE_EFFECTS (exp))
2311 return 0;
2313 warning ("%Hvalue computed is not used", &emit_locus);
2314 return 1;
2318 /* Clear out the memory of the last expression evaluated. */
2320 void
2321 clear_last_expr (void)
2323 last_expr_type = NULL_TREE;
2324 last_expr_value = NULL_RTX;
2325 last_expr_alt_rtl = NULL_RTX;
2328 /* Begin a statement-expression, i.e., a series of statements which
2329 may return a value. Return the RTL_EXPR for this statement expr.
2330 The caller must save that value and pass it to
2331 expand_end_stmt_expr. If HAS_SCOPE is nonzero, temporaries created
2332 in the statement-expression are deallocated at the end of the
2333 expression. */
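/* For example (GNU C, a sketch):

     int z = ({ int t = f (); t + 1; });

   the front end calls this function, expands the inner statements,
   and finishes with expand_end_stmt_expr; the value of the final
   expression statement, "t + 1", becomes the value of the whole
   construct.  */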
2335 tree
2336 expand_start_stmt_expr (int has_scope)
2338 tree t;
2340 /* Make the RTL_EXPR node temporary, not momentary,
2341 so that rtl_expr_chain doesn't become garbage. */
2342 t = make_node (RTL_EXPR);
2343 do_pending_stack_adjust ();
2344 if (has_scope)
2345 start_sequence_for_rtl_expr (t);
2346 else
2347 start_sequence ();
2348 NO_DEFER_POP;
2349 expr_stmts_for_value++;
2350 return t;
2353 /* Restore the previous state at the end of a statement that returns a value.
2354 Returns a tree node representing the statement's value and the
2355 insns to compute the value.
2357 The nodes of that expression have been freed by now, so we cannot use them.
2358 But we don't want to do that anyway; the expression has already been
2359 evaluated and now we just want to use the value. So generate a RTL_EXPR
2360 with the proper type and RTL value.
2362 If the last substatement was not an expression,
2363 return something with type `void'. */
2365 tree
2366 expand_end_stmt_expr (tree t)
2368 OK_DEFER_POP;
2370 if (! last_expr_value || ! last_expr_type)
2372 last_expr_value = const0_rtx;
2373 last_expr_alt_rtl = NULL_RTX;
2374 last_expr_type = void_type_node;
2376 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2377 /* Remove any possible QUEUED. */
2378 last_expr_value = protect_from_queue (last_expr_value, 0);
2380 emit_queue ();
2382 TREE_TYPE (t) = last_expr_type;
2383 RTL_EXPR_RTL (t) = last_expr_value;
2384 RTL_EXPR_ALT_RTL (t) = last_expr_alt_rtl;
2385 RTL_EXPR_SEQUENCE (t) = get_insns ();
2387 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2389 end_sequence ();
2391 /* Don't consider deleting this expr or containing exprs at tree level. */
2392 TREE_SIDE_EFFECTS (t) = 1;
2393 /* Propagate volatility of the actual RTL expr. */
2394 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2396 clear_last_expr ();
2397 expr_stmts_for_value--;
2399 return t;
2402 /* Generate RTL for the start of an if-then. COND is the expression
2403 whose truth should be tested.
2405 If EXITFLAG is nonzero, this conditional is visible to
2406 `exit_something'. */
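/* A front end expands "if (c) A; else B;" roughly as (a sketch):

     expand_start_cond (c, 0);
     ... expand A ...
     expand_start_else ();
     ... expand B ...
     expand_end_cond ();  */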
2408 void
2409 expand_start_cond (tree cond, int exitflag)
2411 struct nesting *thiscond = ALLOC_NESTING ();
2413 /* Make an entry on cond_stack for the cond we are entering. */
2415 thiscond->desc = COND_NESTING;
2416 thiscond->next = cond_stack;
2417 thiscond->all = nesting_stack;
2418 thiscond->depth = ++nesting_depth;
2419 thiscond->data.cond.next_label = gen_label_rtx ();
2420 /* Before we encounter an `else', we don't need a separate exit label
2421 unless there are supposed to be exit statements
2422 to exit this conditional. */
2423 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2424 thiscond->data.cond.endif_label = thiscond->exit_label;
2425 cond_stack = thiscond;
2426 nesting_stack = thiscond;
2428 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2431 /* Generate RTL between then-clause and the elseif-clause
2432 of an if-then-elseif-.... */
2434 void
2435 expand_start_elseif (tree cond)
2437 if (cond_stack->data.cond.endif_label == 0)
2438 cond_stack->data.cond.endif_label = gen_label_rtx ();
2439 emit_jump (cond_stack->data.cond.endif_label);
2440 emit_label (cond_stack->data.cond.next_label);
2441 cond_stack->data.cond.next_label = gen_label_rtx ();
2442 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2445 /* Generate RTL between the then-clause and the else-clause
2446 of an if-then-else. */
2448 void
2449 expand_start_else (void)
2451 if (cond_stack->data.cond.endif_label == 0)
2452 cond_stack->data.cond.endif_label = gen_label_rtx ();
2454 emit_jump (cond_stack->data.cond.endif_label);
2455 emit_label (cond_stack->data.cond.next_label);
2456 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2459 /* After calling expand_start_else, turn this "else" into an "else if"
2460 by providing another condition. */
2462 void
2463 expand_elseif (tree cond)
2465 cond_stack->data.cond.next_label = gen_label_rtx ();
2466 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2469 /* Generate RTL for the end of an if-then.
2470 Pop the record for it off of cond_stack. */
2472 void
2473 expand_end_cond (void)
2475 struct nesting *thiscond = cond_stack;
2477 do_pending_stack_adjust ();
2478 if (thiscond->data.cond.next_label)
2479 emit_label (thiscond->data.cond.next_label);
2480 if (thiscond->data.cond.endif_label)
2481 emit_label (thiscond->data.cond.endif_label);
2483 POPSTACK (cond_stack);
2484 clear_last_expr ();
2487 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2488 loop should be exited by `exit_something'. This is a loop for which
2489 `expand_continue' will jump to the top of the loop.
2491 Make an entry on loop_stack to record the labels associated with
2492 this loop. */
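/* E.g. "while (c) body;" may be expanded as (a sketch):

     expand_start_loop (1);
     expand_exit_loop_top_cond (0, c);
     ... expand body ...
     expand_end_loop ();

   where a zero WHICHLOOP argument means the innermost loop, and the
   top-cond call exits when C is false.  */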
2494 struct nesting *
2495 expand_start_loop (int exit_flag)
2497 struct nesting *thisloop = ALLOC_NESTING ();
2499 /* Make an entry on loop_stack for the loop we are entering. */
2501 thisloop->desc = LOOP_NESTING;
2502 thisloop->next = loop_stack;
2503 thisloop->all = nesting_stack;
2504 thisloop->depth = ++nesting_depth;
2505 thisloop->data.loop.start_label = gen_label_rtx ();
2506 thisloop->data.loop.end_label = gen_label_rtx ();
2507 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2508 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2509 loop_stack = thisloop;
2510 nesting_stack = thisloop;
2512 do_pending_stack_adjust ();
2513 emit_queue ();
2514 emit_note (NOTE_INSN_LOOP_BEG);
2515 emit_label (thisloop->data.loop.start_label);
2517 return thisloop;
2520 /* Like expand_start_loop but for a loop where the continuation point
2521 (for expand_continue_loop) will be specified explicitly. */
2523 struct nesting *
2524 expand_start_loop_continue_elsewhere (int exit_flag)
2526 struct nesting *thisloop = expand_start_loop (exit_flag);
2527 loop_stack->data.loop.continue_label = gen_label_rtx ();
2528 return thisloop;
2531 /* Begin a null, aka do { } while (0) "loop". But since the contents
2532 of said loop can still contain a break, we must frob the loop nest. */
2534 struct nesting *
2535 expand_start_null_loop (void)
2537 struct nesting *thisloop = ALLOC_NESTING ();
2539 /* Make an entry on loop_stack for the loop we are entering. */
2541 thisloop->desc = LOOP_NESTING;
2542 thisloop->next = loop_stack;
2543 thisloop->all = nesting_stack;
2544 thisloop->depth = ++nesting_depth;
2545 thisloop->data.loop.start_label = emit_note (NOTE_INSN_DELETED);
2546 thisloop->data.loop.end_label = gen_label_rtx ();
2547 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2548 thisloop->exit_label = thisloop->data.loop.end_label;
2549 loop_stack = thisloop;
2550 nesting_stack = thisloop;
2552 return thisloop;
2555 /* Specify the continuation point for a loop started with
2556 expand_start_loop_continue_elsewhere.
2557 Use this at the point in the code to which a continue statement
2558 should jump. */
2560 void
2561 expand_loop_continue_here (void)
2563 do_pending_stack_adjust ();
2564 emit_note (NOTE_INSN_LOOP_CONT);
2565 emit_label (loop_stack->data.loop.continue_label);
2568 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2569 Pop the block off of loop_stack. */
2571 void
2572 expand_end_loop (void)
2574 rtx start_label = loop_stack->data.loop.start_label;
2575 rtx etc_note;
2576 int eh_regions, debug_blocks;
2577 bool empty_test;
2579 /* Mark the continue-point at the top of the loop if none elsewhere. */
2580 if (start_label == loop_stack->data.loop.continue_label)
2581 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2583 do_pending_stack_adjust ();
2585 /* If the loop starts with a loop exit, roll that to the end where
2586 it will optimize together with the jump back.
2588 If the loop presently looks like this (in pseudo-C):
2590 LOOP_BEG
2591 start_label:
2592 if (test) goto end_label;
2593 LOOP_END_TOP_COND
2594 body;
2595 goto start_label;
2596 end_label:
2598 transform it to look like:
2600 LOOP_BEG
2601 goto start_label;
2602 top_label:
2603 body;
2604 start_label:
2605 if (test) goto end_label;
2606 goto top_label;
2607 end_label:
2609 We rely on the presence of NOTE_INSN_LOOP_END_TOP_COND to mark
2610 the end of the entry conditional. Without this, our lexical scan
2611 can't tell the difference between an entry conditional and a
2612 body conditional that exits the loop. Mistaking the two means
2613 that we can misplace the NOTE_INSN_LOOP_CONT note, which can
2614 screw up loop unrolling.
2616 Things will be oh so much better when loop optimization is done
2617 off of a proper control flow graph... */
2619 /* Scan insns from the top of the loop looking for the END_TOP_COND note. */
2621 empty_test = true;
2622 eh_regions = debug_blocks = 0;
2623 for (etc_note = start_label; etc_note ; etc_note = NEXT_INSN (etc_note))
2624 if (GET_CODE (etc_note) == NOTE)
2626 if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_END_TOP_COND)
2627 break;
2629 /* We must not walk into a nested loop. */
2630 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_BEG)
2632 etc_note = NULL_RTX;
2633 break;
2636 /* At the same time, scan for EH region notes, as we don't want
2637 to scrog region nesting. This shouldn't happen, but... */
2638 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_BEG)
2639 eh_regions++;
2640 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_END)
2642 if (--eh_regions < 0)
2643 /* We've come to the end of an EH region, but never saw the
2644 beginning of that region. That means that an EH region
2645 begins before the top of the loop, and ends in the middle
2646 of it. The existence of such a situation violates a basic
2647 assumption in this code, since that would imply that even
2648 when EH_REGIONS is zero, we might move code out of an
2649 exception region. */
2650 abort ();
2653 /* Likewise for debug scopes. In this case we'll either (1) move
2654 all of the notes if they are properly nested or (2) leave the
2655 notes alone and only rotate the loop at high optimization
2656 levels when we expect to scrog debug info. */
2657 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_BEG)
2658 debug_blocks++;
2659 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_END)
2660 debug_blocks--;
2662 else if (INSN_P (etc_note))
2663 empty_test = false;
2665 if (etc_note
2666 && optimize
2667 && ! empty_test
2668 && eh_regions == 0
2669 && (debug_blocks == 0 || optimize >= 2)
2670 && NEXT_INSN (etc_note) != NULL_RTX
2671 && ! any_condjump_p (get_last_insn ()))
2673 /* We found one. Move everything from START to ETC to the end
2674 of the loop, and add a jump from the top of the loop. */
2675 rtx top_label = gen_label_rtx ();
2676 rtx start_move = start_label;
2678 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2679 then we want to move this note also. */
2680 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2681 && NOTE_LINE_NUMBER (PREV_INSN (start_move)) == NOTE_INSN_LOOP_CONT)
2682 start_move = PREV_INSN (start_move);
2684 emit_label_before (top_label, start_move);
2686 /* Actually move the insns. If the debug scopes are nested, we
2687 can move everything at once. Otherwise we have to move them
2688 one by one and squeeze out the block notes. */
2689 if (debug_blocks == 0)
2690 reorder_insns (start_move, etc_note, get_last_insn ());
2691 else
2693 rtx insn, next_insn;
2694 for (insn = start_move; insn; insn = next_insn)
2696 /* Figure out which insn comes after this one. We have
2697 to do this before we move INSN. */
2698 next_insn = (insn == etc_note ? NULL : NEXT_INSN (insn));
2700 if (GET_CODE (insn) == NOTE
2701 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2702 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2703 continue;
2705 reorder_insns (insn, insn, get_last_insn ());
2709 /* Add the jump from the top of the loop. */
2710 emit_jump_insn_before (gen_jump (start_label), top_label);
2711 emit_barrier_before (top_label);
2712 start_label = top_label;
2715 emit_jump (start_label);
2716 emit_note (NOTE_INSN_LOOP_END);
2717 emit_label (loop_stack->data.loop.end_label);
2719 POPSTACK (loop_stack);
2721 clear_last_expr ();
2724 /* Finish a null loop, aka do { } while (0). */
2726 void
2727 expand_end_null_loop (void)
2729 do_pending_stack_adjust ();
2730 emit_label (loop_stack->data.loop.end_label);
2732 POPSTACK (loop_stack);
2734 clear_last_expr ();
2737 /* Generate a jump to the current loop's continue-point.
2738 This is usually the top of the loop, but may be specified
2739 explicitly elsewhere. If not currently inside a loop,
2740 return 0 and do nothing; caller will print an error message. */
2742 int
2743 expand_continue_loop (struct nesting *whichloop)
2745 /* Emit information for branch prediction. */
2746 rtx note;
2748 if (flag_guess_branch_prob)
2750 note = emit_note (NOTE_INSN_PREDICTION);
2751 NOTE_PREDICTION (note) = NOTE_PREDICT (PRED_CONTINUE, IS_TAKEN);
2753 clear_last_expr ();
2754 if (whichloop == 0)
2755 whichloop = loop_stack;
2756 if (whichloop == 0)
2757 return 0;
2758 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2759 NULL_RTX);
2760 return 1;
2763 /* Generate a jump to exit the current loop. If not currently inside a loop,
2764 return 0 and do nothing; caller will print an error message. */
2766 int
2767 expand_exit_loop (struct nesting *whichloop)
2769 clear_last_expr ();
2770 if (whichloop == 0)
2771 whichloop = loop_stack;
2772 if (whichloop == 0)
2773 return 0;
2774 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2775 return 1;
2778 /* Generate a conditional jump to exit the current loop if COND
2779 evaluates to zero. If not currently inside a loop,
2780 return 0 and do nothing; caller will print an error message. */
2782 int
2783 expand_exit_loop_if_false (struct nesting *whichloop, tree cond)
2785 rtx label;
2786 clear_last_expr ();
2788 if (whichloop == 0)
2789 whichloop = loop_stack;
2790 if (whichloop == 0)
2791 return 0;
2793 if (integer_nonzerop (cond))
2794 return 1;
2795 if (integer_zerop (cond))
2796 return expand_exit_loop (whichloop);
2798 /* Check if we definitely won't need a fixup. */
2799 if (whichloop == nesting_stack)
2801 jumpifnot (cond, whichloop->data.loop.end_label);
2802 return 1;
2805 /* In order to handle fixups, we actually create a conditional jump
2806 around an unconditional branch to exit the loop. If fixups are
2807 necessary, they go before the unconditional branch. */
2809 label = gen_label_rtx ();
2810 jumpif (cond, label);
2811 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2812 NULL_RTX);
2813 emit_label (label);
2815 return 1;
2818 /* Like expand_exit_loop_if_false except also emit a note marking
2819 the end of the conditional. Should only be used immediately
2820 after expand_loop_start. */
2822 int
2823 expand_exit_loop_top_cond (struct nesting *whichloop, tree cond)
2825 if (! expand_exit_loop_if_false (whichloop, cond))
2826 return 0;
2828 emit_note (NOTE_INSN_LOOP_END_TOP_COND);
2829 return 1;
2832 /* Return nonzero if we should preserve sub-expressions as separate
2833 pseudos. We never do so if we aren't optimizing. We always do so
2834 if -fexpensive-optimizations.
2836 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2837 the loop may still be a small one. */
2839 int
2840 preserve_subexpressions_p (void)
2842 rtx insn;
2844 if (flag_expensive_optimizations)
2845 return 1;
2847 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2848 return 0;
2850 insn = get_last_insn_anywhere ();
2852 return (insn
2853 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2854 < n_non_fixed_regs * 3));
2858 /* Generate a jump to exit the current loop, conditional, binding contour
2859 or case statement. Not all such constructs are visible to this function,
2860 only those started with EXIT_FLAG nonzero. Individual languages use
2861 the EXIT_FLAG parameter to control which kinds of constructs you can
2862 exit this way.
2864 If not currently inside anything that can be exited,
2865 return 0 and do nothing; caller will print an error message. */
2867 int
2868 expand_exit_something (void)
2870 struct nesting *n;
2871 clear_last_expr ();
2872 for (n = nesting_stack; n; n = n->all)
2873 if (n->exit_label != 0)
2875 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2876 return 1;
2879 return 0;
2882 /* Generate RTL to return from the current function, with no value.
2883 (That is, we do not do anything about returning any value.) */
2885 void
2886 expand_null_return (void)
2888 rtx last_insn;
2890 last_insn = get_last_insn ();
2892 /* If this function was declared to return a value, but we are
2893 not supplying one here, clobber the return registers so that they
2894 are not propagated live to the rest of the function. */
2895 clobber_return_register ();
2897 expand_null_return_1 (last_insn);
2900 /* Generate RTL to return directly from the current function.
2901 (That is, we bypass any return value.) */
2903 void
2904 expand_naked_return (void)
2906 rtx last_insn, end_label;
2908 last_insn = get_last_insn ();
2909 end_label = naked_return_label;
2911 clear_pending_stack_adjust ();
2912 do_pending_stack_adjust ();
2913 clear_last_expr ();
2915 if (end_label == 0)
2916 end_label = naked_return_label = gen_label_rtx ();
2917 expand_goto_internal (NULL_TREE, end_label, last_insn);
2920 /* Try to guess whether the value of return means error code. */
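/* Illustrative mapping: "return NULL;" from a pointer-valued function
   yields PRED_NULL_RETURN; "return -1;" yields PRED_NEGATIVE_RETURN;
   "return 42;" yields PRED_CONST_RETURN; "return 0;" or "return 1;"
   from a non-pointer function yields no prediction.  */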
2921 static enum br_predictor
2922 return_prediction (rtx val)
2924 /* Different heuristics for pointers and scalars. */
2925 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2927 /* NULL is usually not returned. */
2928 if (val == const0_rtx)
2929 return PRED_NULL_RETURN;
2931 else
2933 /* Negative return values are often used to indicate
2934 errors. */
2935 if (GET_CODE (val) == CONST_INT
2936 && INTVAL (val) < 0)
2937 return PRED_NEGATIVE_RETURN;
2938 /* Constant return values also usually indicate errors;
2939 zero/one often mean booleans, so exclude them from the
2940 heuristics. */
2941 if (CONSTANT_P (val)
2942 && (val != const0_rtx && val != const1_rtx))
2943 return PRED_CONST_RETURN;
2945 return PRED_NO_PREDICTION;
2949 /* If the current function returns values in the most significant part
2950 of a register, shift return value VAL appropriately. The mode of
2951 the function's return type is known not to be BLKmode. */
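/* For instance (hypothetical target): a 16-bit value returned in the
   most significant half of a 32-bit register needs

     shift = 32 - 8 * 2 = 16

   so VAL is shifted left by 16 bits before being returned.  */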
2953 static rtx
2954 shift_return_value (rtx val)
2956 tree type;
2958 type = TREE_TYPE (DECL_RESULT (current_function_decl));
2959 if (targetm.calls.return_in_msb (type))
2961 rtx target;
2962 HOST_WIDE_INT shift;
2964 target = DECL_RTL (DECL_RESULT (current_function_decl));
2965 shift = (GET_MODE_BITSIZE (GET_MODE (target))
2966 - BITS_PER_UNIT * int_size_in_bytes (type));
2967 if (shift > 0)
2968 val = expand_binop (GET_MODE (target), ashl_optab,
2969 gen_lowpart (GET_MODE (target), val),
2970 GEN_INT (shift), target, 1, OPTAB_WIDEN);
2972 return val;
2976 /* Generate RTL to return from the current function, with value VAL. */
2978 static void
2979 expand_value_return (rtx val)
2981 rtx last_insn;
2982 rtx return_reg;
2983 enum br_predictor pred;
2985 if (flag_guess_branch_prob
2986 && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
2988 /* Emit information for branch prediction. */
2989 rtx note;
2991 note = emit_note (NOTE_INSN_PREDICTION);
2993 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
2997 last_insn = get_last_insn ();
2998 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
3000 /* Copy the value to the return location
3001 unless it's already there. */
3003 if (return_reg != val)
3005 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
3006 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
3008 int unsignedp = TREE_UNSIGNED (type);
3009 enum machine_mode old_mode
3010 = DECL_MODE (DECL_RESULT (current_function_decl));
3011 enum machine_mode mode
3012 = promote_mode (type, old_mode, &unsignedp, 1);
3014 if (mode != old_mode)
3015 val = convert_modes (mode, old_mode, val, unsignedp);
3017 if (GET_CODE (return_reg) == PARALLEL)
3018 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3019 else
3020 emit_move_insn (return_reg, val);
3023 expand_null_return_1 (last_insn);
3026 /* Output a return with no value. If LAST_INSN is nonzero,
3027 pretend that the return takes place after LAST_INSN. */
3029 static void
3030 expand_null_return_1 (rtx last_insn)
3032 rtx end_label = cleanup_label ? cleanup_label : return_label;
3034 clear_pending_stack_adjust ();
3035 do_pending_stack_adjust ();
3036 clear_last_expr ();
3038 if (end_label == 0)
3039 end_label = return_label = gen_label_rtx ();
3040 expand_goto_internal (NULL_TREE, end_label, last_insn);
3043 /* Generate RTL to evaluate the expression RETVAL and return it
3044 from the current function. */
3046 void
3047 expand_return (tree retval)
3049 /* If there are any cleanups to be performed, then they will
3050 be inserted following LAST_INSN. It is desirable
3051 that the last_insn, for such purposes, should be the
3052 last insn before computing the return value. Otherwise, cleanups
3053 which call functions can clobber the return value. */
3054 /* ??? rms: I think that is erroneous, because in C++ it would
3055 run destructors on variables that might be used in the subsequent
3056 computation of the return value. */
3057 rtx last_insn = 0;
3058 rtx result_rtl;
3059 rtx val = 0;
3060 tree retval_rhs;
3062 /* If function wants no value, give it none. */
3063 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3065 expand_expr (retval, NULL_RTX, VOIDmode, 0);
3066 emit_queue ();
3067 expand_null_return ();
3068 return;
3071 if (retval == error_mark_node)
3073 /* Treat this like a return of no value from a function that
3074 returns a value. */
3075 expand_null_return ();
3076 return;
3078 else if (TREE_CODE (retval) == RESULT_DECL)
3079 retval_rhs = retval;
3080 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
3081 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3082 retval_rhs = TREE_OPERAND (retval, 1);
3083 else if (VOID_TYPE_P (TREE_TYPE (retval)))
3084 /* Recognize tail-recursive call to void function. */
3085 retval_rhs = retval;
3086 else
3087 retval_rhs = NULL_TREE;
3089 last_insn = get_last_insn ();
3091 /* Distribute the return down into a conditional expr if either of
3092 the sides may involve tail recursion (see test below). This increases
3093 the number of tail recursions we see. Don't do this always, since it
3094 can produce sub-optimal code in some cases, and because we distribute
3095 assignments into conditional expressions when it would help. */
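     /* I.e. (a sketch): "return p ? f () : g ();" is rewritten as

	  if (p) return f (); else return g ();

	so that each arm can be recognized as a tail recursion on its
	own.  */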
3097 if (optimize && retval_rhs != 0
3098 && frame_offset == 0
3099 && TREE_CODE (retval_rhs) == COND_EXPR
3100 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
3101 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
3103 rtx label = gen_label_rtx ();
3104 tree expr;
3106 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
3107 start_cleanup_deferral ();
3108 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3109 DECL_RESULT (current_function_decl),
3110 TREE_OPERAND (retval_rhs, 1));
3111 TREE_SIDE_EFFECTS (expr) = 1;
3112 expand_return (expr);
3113 emit_label (label);
3115 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3116 DECL_RESULT (current_function_decl),
3117 TREE_OPERAND (retval_rhs, 2));
3118 TREE_SIDE_EFFECTS (expr) = 1;
3119 expand_return (expr);
3120 end_cleanup_deferral ();
3121 return;
3124 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3126 /* If the result is an aggregate that is being returned in one (or more)
3127 registers, load the registers here. The compiler currently can't handle
3128 copying a BLKmode value into registers. We could put this code in a
3129 more general area (for use by everyone instead of just function
3130 call/return), but until this feature is generally usable it is kept here
3131 (and in expand_call). The value must go into a pseudo in case there
3132 are cleanups that will clobber the real return register. */
3134 if (retval_rhs != 0
3135 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3136 && GET_CODE (result_rtl) == REG)
3138 int i;
3139 unsigned HOST_WIDE_INT bitpos, xbitpos;
3140 unsigned HOST_WIDE_INT padding_correction = 0;
3141 unsigned HOST_WIDE_INT bytes
3142 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3143 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3144 unsigned int bitsize
3145 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3146 rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
3147 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3148 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3149 enum machine_mode tmpmode, result_reg_mode;
3151 if (bytes == 0)
3153 expand_null_return ();
3154 return;
3157 /* If the structure doesn't take up a whole number of words, see
3158 whether the register value should be padded on the left or on
3159 the right. Set PADDING_CORRECTION to the number of padding
3160 bits needed on the left side.
3162 In most ABIs, the structure will be returned at the least significant
3163 end of the register, which translates to right padding on little-endian
3164 targets and left padding on big-endian targets. The opposite
3165 holds if the structure is returned at the most significant
3166 end of the register. */
3167 if (bytes % UNITS_PER_WORD != 0
3168 && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
3169 ? !BYTES_BIG_ENDIAN
3170 : BYTES_BIG_ENDIAN))
3171 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3172 * BITS_PER_UNIT));
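	  /* Worked example (illustrative, 32-bit words): bytes = 6
	     with UNITS_PER_WORD = 4 leaves 2 bytes in the last word,
	     so a target that pads on the left (e.g. a big-endian
	     target with the usual return convention) gets
	     padding_correction = 32 - 2 * 8 = 16 bits.  */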
3174 /* Copy the structure BITSIZE bits at a time. */
3175 for (bitpos = 0, xbitpos = padding_correction;
3176 bitpos < bytes * BITS_PER_UNIT;
3177 bitpos += bitsize, xbitpos += bitsize)
3179 /* We need a new destination pseudo each time xbitpos is
3180 on a word boundary and when xbitpos == padding_correction
3181 (the first time through). */
3182 if (xbitpos % BITS_PER_WORD == 0
3183 || xbitpos == padding_correction)
3185 /* Generate an appropriate register. */
3186 dst = gen_reg_rtx (word_mode);
3187 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3189 /* Clear the destination before we move anything into it. */
3190 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
3193 /* We need a new source operand each time bitpos is on a word
3194 boundary. */
3195 if (bitpos % BITS_PER_WORD == 0)
3196 src = operand_subword_force (result_val,
3197 bitpos / BITS_PER_WORD,
3198 BLKmode);
3200 /* Use bitpos for the source extraction (left justified) and
3201 xbitpos for the destination store (right justified). */
3202 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3203 extract_bit_field (src, bitsize,
3204 bitpos % BITS_PER_WORD, 1,
3205 NULL_RTX, word_mode, word_mode,
3206 BITS_PER_WORD),
3207 BITS_PER_WORD);
3210 tmpmode = GET_MODE (result_rtl);
3211 if (tmpmode == BLKmode)
3213 /* Find the smallest integer mode large enough to hold the
3214 entire structure and use that mode instead of BLKmode
3215 on the USE insn for the return register. */
3216 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3217 tmpmode != VOIDmode;
3218 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3219 /* Have we found a large enough mode? */
3220 if (GET_MODE_SIZE (tmpmode) >= bytes)
3221 break;
3223 /* No suitable mode found. */
3224 if (tmpmode == VOIDmode)
3225 abort ();
3227 PUT_MODE (result_rtl, tmpmode);
3230 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3231 result_reg_mode = word_mode;
3232 else
3233 result_reg_mode = tmpmode;
3234 result_reg = gen_reg_rtx (result_reg_mode);
3236 emit_queue ();
3237 for (i = 0; i < n_regs; i++)
3238 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3239 result_pseudos[i]);
3241 if (tmpmode != result_reg_mode)
3242 result_reg = gen_lowpart (tmpmode, result_reg);
3244 expand_value_return (result_reg);
3246 else if (retval_rhs != 0
3247 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3248 && (GET_CODE (result_rtl) == REG
3249 || (GET_CODE (result_rtl) == PARALLEL)))
3251 /* Calculate the return value into a temporary (usually a pseudo
3252 reg). */
3253 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3254 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3256 val = assign_temp (nt, 0, 0, 1);
3257 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3258 val = force_not_mem (val);
3259 emit_queue ();
3260 /* Return the calculated value, doing cleanups first. */
3261 expand_value_return (shift_return_value (val));
3263 else
3265 /* No cleanups or no hard reg used;
3266 calculate value into hard return reg. */
3267 expand_expr (retval, const0_rtx, VOIDmode, 0);
3268 emit_queue ();
3269 expand_value_return (result_rtl);
3273 /* Attempt to optimize a potential tail recursion call into a goto.
3274 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3275 where to place the jump to the tail recursion label.
3277 Return TRUE if the call was optimized into a goto. */
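/* Sketch: in

     int sum (int n, int acc) { return n ? sum (n - 1, acc + n) : acc; }

   the recursive call can become stores of N - 1 and ACC + N into the
   parameter registers followed by a goto to TAIL_RECURSION_LABEL at
   the top of the function.  */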
3279 int
3280 optimize_tail_recursion (tree arguments, rtx last_insn)
3282 /* Finish checking validity, and if valid emit code to set the
3283 argument variables for the new call. */
3284 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3286 if (tail_recursion_label == 0)
3288 tail_recursion_label = gen_label_rtx ();
3289 emit_label_after (tail_recursion_label,
3290 tail_recursion_reentry);
3292 emit_queue ();
3293 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3294 emit_barrier ();
3295 return 1;
3297 return 0;
3300 /* Emit code to alter this function's formal parms for a tail-recursive call.
3301 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3302 FORMALS is the chain of decls of formals.
3303 Return 1 if this can be done;
3304 otherwise return 0 and do not emit any code. */
3306 static int
3307 tail_recursion_args (tree actuals, tree formals)
3309 tree a = actuals, f = formals;
3310 int i;
3311 rtx *argvec;
3313 /* Check that number and types of actuals are compatible
3314 with the formals. This is not always true in valid C code.
3315 Also check that no formal needs to be addressable
3316 and that all formals are scalars. */
3318 /* Also count the args. */
3320 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3322 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3323 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3324 return 0;
3325 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3326 return 0;
3328 if (a != 0 || f != 0)
3329 return 0;
3331 /* Compute all the actuals. */
3333 argvec = alloca (i * sizeof (rtx));
3335 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3336 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3338 /* Find which actual values refer to current values of previous formals.
3339 Copy each of them now, before any formal is changed. */
3341 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3343 int copy = 0;
3344 int j;
3345 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3346 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3348 copy = 1;
3349 break;
3351 if (copy)
3352 argvec[i] = copy_to_reg (argvec[i]);
3355 /* Store the values of the actuals into the formals. */
3357 for (f = formals, a = actuals, i = 0; f;
3358 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3360 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3361 emit_move_insn (DECL_RTL (f), argvec[i]);
3362 else
3364 rtx tmp = argvec[i];
3365 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a)));
3366 promote_mode (TREE_TYPE (TREE_VALUE (a)), GET_MODE (tmp),
3367 &unsignedp, 0);
3368 if (DECL_MODE (f) != GET_MODE (DECL_RTL (f)))
3370 tmp = gen_reg_rtx (DECL_MODE (f));
3371 convert_move (tmp, argvec[i], unsignedp);
3373 convert_move (DECL_RTL (f), tmp, unsignedp);
3377 free_temp_slots ();
3378 return 1;
3381 /* Generate the RTL code for entering a binding contour.
3382 The variables are declared one by one, by calls to `expand_decl'.
3384 FLAGS is a bitwise or of the following flags:
3386 1 - Nonzero if this construct should be visible to
3387 `exit_something'.
3389 2 - Nonzero if this contour does not require a
3390 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3391 language-independent code should set this flag because they
3392 will not create corresponding BLOCK nodes. (There should be
3393 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3394 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3395 when expand_end_bindings is called.
3397 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3398 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3399 note. */
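/* E.g. expand_start_bindings (2), the usual language-independent
   call, starts a contour with no BLOCK note that is invisible to
   `exit_something'; expand_start_bindings_and_block (0, block)
   instead emits a NOTE_INSN_BLOCK_BEG whose NOTE_BLOCK is BLOCK.  */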
3401 void
3402 expand_start_bindings_and_block (int flags, tree block)
3404 struct nesting *thisblock = ALLOC_NESTING ();
3405 rtx note;
3406 int exit_flag = ((flags & 1) != 0);
3407 int block_flag = ((flags & 2) == 0);
3409 /* If a BLOCK is supplied, then the caller should be requesting a
3410 NOTE_INSN_BLOCK_BEG note. */
3411 if (!block_flag && block)
3412 abort ();
3414 /* Create a note to mark the beginning of the block. */
3415 if (block_flag)
3417 note = emit_note (NOTE_INSN_BLOCK_BEG);
3418 NOTE_BLOCK (note) = block;
3420 else
3421 note = emit_note (NOTE_INSN_DELETED);
3423 /* Make an entry on block_stack for the block we are entering. */
3425 thisblock->desc = BLOCK_NESTING;
3426 thisblock->next = block_stack;
3427 thisblock->all = nesting_stack;
3428 thisblock->depth = ++nesting_depth;
3429 thisblock->data.block.stack_level = 0;
3430 thisblock->data.block.cleanups = 0;
3431 thisblock->data.block.exception_region = 0;
3432 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3434 thisblock->data.block.conditional_code = 0;
3435 thisblock->data.block.last_unconditional_cleanup = note;
3436 /* When we insert instructions after the last unconditional cleanup,
3437 we don't adjust last_insn. That means that a later add_insn will
3438 clobber the instructions we've just added. The easiest way to
3439 fix this is to just insert another instruction here, so that the
3440 instructions inserted after the last unconditional cleanup are
3441 never the last instruction. */
3442 emit_note (NOTE_INSN_DELETED);
3444 if (block_stack
3445 && !(block_stack->data.block.cleanups == NULL_TREE
3446 && block_stack->data.block.outer_cleanups == NULL_TREE))
3447 thisblock->data.block.outer_cleanups
3448 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3449 block_stack->data.block.outer_cleanups);
3450 else
3451 thisblock->data.block.outer_cleanups = 0;
3452 thisblock->data.block.label_chain = 0;
3453 thisblock->data.block.innermost_stack_block = stack_block_stack;
3454 thisblock->data.block.first_insn = note;
3455 thisblock->data.block.block_start_count = ++current_block_start_count;
3456 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3457 block_stack = thisblock;
3458 nesting_stack = thisblock;
3460 /* Make a new level for allocating stack slots. */
3461 push_temp_slots ();
3464 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3465 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3466 expand_expr are made. After we end the region, we know that all
3467 space for all temporaries that were created by TARGET_EXPRs will be
3468 destroyed and their space freed for reuse. */
3470 void
3471 expand_start_target_temps (void)
3473 /* This is so that even if the result is preserved, the space
3474 allocated will be freed, as we know that it is no longer in use. */
3475 push_temp_slots ();
3477 /* Start a new binding layer that will keep track of all cleanup
3478 actions to be performed. */
3479 expand_start_bindings (2);
3481 target_temp_slot_level = temp_slot_level;
3484 void
3485 expand_end_target_temps (void)
3487 expand_end_bindings (NULL_TREE, 0, 0);
3489 /* This is so that even if the result is preserved, the space
3490 allocated will be freed, as we know that it is no longer in use. */
3491 pop_temp_slots ();
3494 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
3495 in question represents the outermost pair of curly braces (i.e. the "body
3496 block") of a function or method.
3498 For any BLOCK node representing a "body block" of a function or method, the
3499 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3500 represents the outermost (function) scope for the function or method (i.e.
3501 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3502 *that* node in turn will point to the relevant FUNCTION_DECL node. */
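/* Sketch of the chain this checks:

     STMT (the body BLOCK)
       BLOCK_SUPERCONTEXT: BLOCK (outermost function scope)
	 BLOCK_SUPERCONTEXT: FUNCTION_DECL

   and only for such a chain is 1 returned.  */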
3504 int
3505 is_body_block (tree stmt)
3507 if (lang_hooks.no_body_blocks)
3508 return 0;
3510 if (TREE_CODE (stmt) == BLOCK)
3512 tree parent = BLOCK_SUPERCONTEXT (stmt);
3514 if (parent && TREE_CODE (parent) == BLOCK)
3516 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3518 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3519 return 1;
3523 return 0;
3526 /* True if we are currently emitting insns in an area of output code
3527 that is controlled by a conditional expression. This is used by
3528 the cleanup handling code to generate conditional cleanup actions. */
3530 int
3531 conditional_context (void)
3533 return block_stack && block_stack->data.block.conditional_code;
3536 /* Return an opaque pointer to the current nesting level, so frontend code
3537 can check its own sanity. */
3539 struct nesting *
3540 current_nesting_level (void)
3542 return cfun ? block_stack : 0;
3545 /* Emit a handler label for a nonlocal goto handler.
3546 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3548 static rtx
3549 expand_nl_handler_label (rtx slot, rtx before_insn)
3551 rtx insns;
3552 rtx handler_label = gen_label_rtx ();
3554 /* Don't let cleanup_cfg delete the handler. */
3555 LABEL_PRESERVE_P (handler_label) = 1;
3557 start_sequence ();
3558 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3559 insns = get_insns ();
3560 end_sequence ();
3561 emit_insn_before (insns, before_insn);
3563 emit_label (handler_label);
3565 return handler_label;
3568 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3569 handler. */
3570 static void
3571 expand_nl_goto_receiver (void)
3573 /* Clobber the FP when we get here, so we have to make sure it's
3574 marked as used by this function. */
3575 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
3577 /* Mark the static chain as clobbered here so life information
3578 doesn't get messed up for it. */
3579 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
3581 #ifdef HAVE_nonlocal_goto
3582 if (! HAVE_nonlocal_goto)
3583 #endif
3584 /* First adjust our frame pointer to its actual value. It was
3585 previously set to the start of the virtual area corresponding to
3586 the stacked variables when we branched here and now needs to be
3587 adjusted to the actual hardware fp value.
3589 Assignments to virtual registers are converted by
3590 instantiate_virtual_regs into the corresponding assignment
3591 to the underlying register (fp in this case) that makes
3592 the original assignment true.
3593 So the following insn will actually be
3594 decrementing fp by STARTING_FRAME_OFFSET. */
3595 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3597 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3598 if (fixed_regs[ARG_POINTER_REGNUM])
3600 #ifdef ELIMINABLE_REGS
3601 /* If the argument pointer can be eliminated in favor of the
3602 frame pointer, we don't need to restore it. We assume here
3603 that if such an elimination is present, it can always be used.
3604 This is the case on all known machines; if we don't make this
3605 assumption, we do unnecessary saving on many machines. */
3606 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3607 size_t i;
3609 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3610 if (elim_regs[i].from == ARG_POINTER_REGNUM
3611 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3612 break;
3614 if (i == ARRAY_SIZE (elim_regs))
3615 #endif
3617 /* Now restore our arg pointer from the address at which it
3618 was saved in our stack frame. */
3619 emit_move_insn (virtual_incoming_args_rtx,
3620 copy_to_reg (get_arg_pointer_save_area (cfun)));
3623 #endif
3625 #ifdef HAVE_nonlocal_goto_receiver
3626 if (HAVE_nonlocal_goto_receiver)
3627 emit_insn (gen_nonlocal_goto_receiver ());
3628 #endif
3630 /* @@@ This is a kludge. Not all machine descriptions define a blockage
3631 insn, but we must not allow the code we just generated to be reordered
3632 by scheduling. Specifically, the update of the frame pointer must
3633 happen immediately, not later. So emit an ASM_INPUT to act as a
3634 blockage insn. */
3635 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
3638 /* Make handlers for nonlocal gotos taking place in the function calls in
3639 block THISBLOCK. */
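/* A source-level sketch of the situation handled here (GNU C nested
   functions; illustrative only, the callee name is made up):

     void f (void)
     {
       __label__ out;
       void g (void) { goto out; }
       apply (g);    -- any call in the block may jump to `out'
     out:;
     }

   Each such label gets a receiver that restores the frame and
   argument pointers before jumping to the real label.  */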
3641 static void
3642 expand_nl_goto_receivers (struct nesting *thisblock)
3644 tree link;
3645 rtx afterward = gen_label_rtx ();
3646 rtx insns, slot;
3647 rtx label_list;
3648 int any_invalid;
3650 /* Record the handler address in the stack slot for that purpose,
3651 during this block, saving and restoring the outer value. */
3652 if (thisblock->next != 0)
3653 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3655 rtx save_receiver = gen_reg_rtx (Pmode);
3656 emit_move_insn (XEXP (slot, 0), save_receiver);
3658 start_sequence ();
3659 emit_move_insn (save_receiver, XEXP (slot, 0));
3660 insns = get_insns ();
3661 end_sequence ();
3662 emit_insn_before (insns, thisblock->data.block.first_insn);
3665 /* Jump around the handlers; they run only when specially invoked. */
3666 emit_jump (afterward);
3668 /* Make a separate handler for each label. */
3669 link = nonlocal_labels;
3670 slot = nonlocal_goto_handler_slots;
3671 label_list = NULL_RTX;
3672 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3673 /* Skip any labels we shouldn't be able to jump to from here;
3674 we generate one special handler for all of them below, which
3675 just emits a trap. */
3676 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3678 rtx lab;
3679 lab = expand_nl_handler_label (XEXP (slot, 0),
3680 thisblock->data.block.first_insn);
3681 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3683 expand_nl_goto_receiver ();
3685 /* Jump to the "real" nonlocal label. */
3686 expand_goto (TREE_VALUE (link));
3689 /* A second pass over all nonlocal labels; this time we handle those
3690 we should not be able to jump to at this point. */
3691 link = nonlocal_labels;
3692 slot = nonlocal_goto_handler_slots;
3693 any_invalid = 0;
3694 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3695 if (DECL_TOO_LATE (TREE_VALUE (link)))
3697 rtx lab;
3698 lab = expand_nl_handler_label (XEXP (slot, 0),
3699 thisblock->data.block.first_insn);
3700 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3701 any_invalid = 1;
3704 if (any_invalid)
3706 expand_nl_goto_receiver ();
3707 expand_builtin_trap ();
3710 nonlocal_goto_handler_labels = label_list;
3711 emit_label (afterward);
3714 /* Warn about any unused VARS (which may contain nodes other than
3715 VAR_DECLs, but such nodes are ignored). The nodes are connected
3716 via the TREE_CHAIN field. */
3718 void
3719 warn_about_unused_variables (tree vars)
3721 tree decl;
3723 if (warn_unused_variable)
3724 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3725 if (TREE_CODE (decl) == VAR_DECL
3726 && ! TREE_USED (decl)
3727 && ! DECL_IN_SYSTEM_HEADER (decl)
3728 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3729 warning ("%Junused variable '%D'", decl, decl);
3732 /* Generate RTL code to terminate a binding contour.
3734 VARS is the chain of VAR_DECL nodes for the variables bound in this
3735 contour. There may actually be other nodes in this chain, but any
3736 nodes other than VAR_DECLS are ignored.
3738 MARK_ENDS is nonzero if we should put a note at the beginning
3739 and end of this binding contour.
3741 DONT_JUMP_IN is positive if it is not valid to jump into this contour,
3742 zero if we can jump into this contour only if it does not have a saved
3743 stack level, and negative if we are not to check for invalid use of
3744 labels (because the front end does that). */
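/* A hypothetical front-end call sequence (the exact sequence varies
   by front end, and GETDECLS stands in for the language's way of
   fetching the contour's declarations):

     expand_start_bindings (0);
     ... expand_decl / expand_decl_init for each local, statements ...
     expand_end_bindings (getdecls (), 1, 0);

   With MARK_ENDS nonzero the NOTE_INSN_BLOCK_BEG emitted at the start
   of the contour is kept and the matching NOTE_INSN_BLOCK_END is
   emitted below; otherwise the beginning-mark is deleted.  */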
3746 void
3747 expand_end_bindings (tree vars, int mark_ends, int dont_jump_in)
3749 struct nesting *thisblock = block_stack;
3751 /* If any of the variables in this scope were not used, warn the
3752 user. */
3753 warn_about_unused_variables (vars);
3755 if (thisblock->exit_label)
3757 do_pending_stack_adjust ();
3758 emit_label (thisblock->exit_label);
3761 /* If necessary, make handlers for nonlocal gotos taking
3762 place in the function calls in this block. */
3763 if (function_call_count != 0 && nonlocal_labels
3764 /* Make handler for outermost block
3765 if there were any nonlocal gotos to this function. */
3766 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3767 /* Make handler for inner block if it has something
3768 special to do when you jump out of it. */
3769 : (thisblock->data.block.cleanups != 0
3770 || thisblock->data.block.stack_level != 0)))
3771 expand_nl_goto_receivers (thisblock);
3773 /* Don't allow jumping into a block that has a stack level.
3774 Cleanups are allowed, though. */
3775 if (dont_jump_in > 0
3776 || (dont_jump_in == 0 && thisblock->data.block.stack_level != 0))
3778 struct label_chain *chain;
3780 /* Any labels in this block are no longer valid to go to.
3781 Mark them to cause an error message. */
3782 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3784 DECL_TOO_LATE (chain->label) = 1;
3785 /* If any goto without a fixup came to this label,
3786 that must be an error, because gotos without fixups
3787 come from outside all saved stack-levels. */
3788 if (TREE_ADDRESSABLE (chain->label))
3789 error ("%Jlabel '%D' used before containing binding contour",
3790 chain->label, chain->label);
3794 /* Restore stack level in effect before the block
3795 (only if variable-size objects allocated). */
3796 /* Perform any cleanups associated with the block. */
3798 if (thisblock->data.block.stack_level != 0
3799 || thisblock->data.block.cleanups != 0)
3801 int reachable;
3802 rtx insn;
3804 /* Don't let cleanups affect ({...}) constructs. */
3805 int old_expr_stmts_for_value = expr_stmts_for_value;
3806 rtx old_last_expr_value = last_expr_value;
3807 rtx old_last_expr_alt_rtl = last_expr_alt_rtl;
3808 tree old_last_expr_type = last_expr_type;
3809 expr_stmts_for_value = 0;
3811 /* Only clean up here if this point can actually be reached. */
3812 insn = get_last_insn ();
3813 if (GET_CODE (insn) == NOTE)
3814 insn = prev_nonnote_insn (insn);
3815 reachable = (! insn || GET_CODE (insn) != BARRIER);
3817 /* Do the cleanups. */
3818 expand_cleanups (thisblock->data.block.cleanups, 0, reachable);
3819 if (reachable)
3820 do_pending_stack_adjust ();
3822 expr_stmts_for_value = old_expr_stmts_for_value;
3823 last_expr_value = old_last_expr_value;
3824 last_expr_alt_rtl = old_last_expr_alt_rtl;
3825 last_expr_type = old_last_expr_type;
3827 /* Restore the stack level. */
3829 if (reachable && thisblock->data.block.stack_level != 0)
3831 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3832 thisblock->data.block.stack_level, NULL_RTX);
3833 if (nonlocal_goto_handler_slots != 0)
3834 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3835 NULL_RTX);
3838 /* Any gotos out of this block must also do these things.
3839 Also report any gotos with fixups that came to labels in this
3840 level. */
3841 fixup_gotos (thisblock,
3842 thisblock->data.block.stack_level,
3843 thisblock->data.block.cleanups,
3844 thisblock->data.block.first_insn,
3845 dont_jump_in);
3848 /* Mark the beginning and end of the scope if requested.
3849 We do this now, after running cleanups on the variables
3850 just going out of scope, so they are in scope for their cleanups. */
3852 if (mark_ends)
3854 rtx note = emit_note (NOTE_INSN_BLOCK_END);
3855 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3857 else
3858 /* Get rid of the beginning-mark if we don't make an end-mark. */
3859 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3861 /* Restore the temporary level of TARGET_EXPRs. */
3862 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3864 /* Restore block_stack level for containing block. */
3866 stack_block_stack = thisblock->data.block.innermost_stack_block;
3867 POPSTACK (block_stack);
3869 /* Pop the stack slot nesting and free any slots at this level. */
3870 pop_temp_slots ();
3873 /* Generate code to save the stack pointer at the start of the current block
3874 and set up to restore it on exit. */
3876 void
3877 save_stack_pointer (void)
3879 struct nesting *thisblock = block_stack;
3881 if (thisblock->data.block.stack_level == 0)
3883 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3884 &thisblock->data.block.stack_level,
3885 thisblock->data.block.first_insn);
3886 stack_block_stack = thisblock;
3890 /* Generate RTL for the automatic variable declaration DECL.
3891 (Other kinds of declarations are simply ignored if seen here.) */
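/* Illustrative examples of the cases handled below (not from the
   original source):

     int i;           -- fixed size, may live in a pseudo register
     char buf[32];    -- fixed size, gets a stack slot via assign_temp
     char vbuf[n];    -- variable size, space is pushed on the stack
                         with allocate_dynamic_stack_space  */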
3893 void
3894 expand_decl (tree decl)
3896 tree type;
3898 type = TREE_TYPE (decl);
3900 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3901 type in case this node is used in a reference. */
3902 if (TREE_CODE (decl) == CONST_DECL)
3904 DECL_MODE (decl) = TYPE_MODE (type);
3905 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3906 DECL_SIZE (decl) = TYPE_SIZE (type);
3907 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3908 return;
3911 /* Otherwise, only automatic variables need any expansion done. Static and
3912 external variables, and external functions, will be handled by
3913 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3914 nothing. PARM_DECLs are handled in `assign_parms'. */
3915 if (TREE_CODE (decl) != VAR_DECL)
3916 return;
3918 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3919 return;
3921 /* Create the RTL representation for the variable. */
3923 if (type == error_mark_node)
3924 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3926 else if (DECL_SIZE (decl) == 0)
3927 /* Variable with incomplete type. */
3929 rtx x;
3930 if (DECL_INITIAL (decl) == 0)
3931 /* Error message was already done; now avoid a crash. */
3932 x = gen_rtx_MEM (BLKmode, const0_rtx);
3933 else
3934 /* An initializer is going to decide the size of this array.
3935 Until we know the size, represent its address with a reg. */
3936 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3938 set_mem_attributes (x, decl, 1);
3939 SET_DECL_RTL (decl, x);
3941 else if (DECL_MODE (decl) != BLKmode
3942 /* If -ffloat-store, don't put explicit float vars
3943 into regs. */
3944 && !(flag_float_store
3945 && TREE_CODE (type) == REAL_TYPE)
3946 && ! TREE_THIS_VOLATILE (decl)
3947 && ! DECL_NONLOCAL (decl)
3948 && (DECL_REGISTER (decl) || DECL_ARTIFICIAL (decl) || optimize))
3950 /* Automatic variable that can go in a register. */
3951 int unsignedp = TREE_UNSIGNED (type);
3952 enum machine_mode reg_mode
3953 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3955 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3957 if (!DECL_ARTIFICIAL (decl))
3958 mark_user_reg (DECL_RTL (decl));
3960 if (POINTER_TYPE_P (type))
3961 mark_reg_pointer (DECL_RTL (decl),
3962 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3964 maybe_set_unchanging (DECL_RTL (decl), decl);
3966 /* If something wants our address, try to use ADDRESSOF. */
3967 if (TREE_ADDRESSABLE (decl))
3968 put_var_into_stack (decl, /*rescan=*/false);
3971 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3972 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3973 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3974 STACK_CHECK_MAX_VAR_SIZE)))
3976 /* Variable of fixed size that goes on the stack. */
3977 rtx oldaddr = 0;
3978 rtx addr;
3979 rtx x;
3981 /* If we previously made RTL for this decl, it must be an array
3982 whose size was determined by the initializer.
3983 The old address was a register; set that register now
3984 to the proper address. */
3985 if (DECL_RTL_SET_P (decl))
3987 if (GET_CODE (DECL_RTL (decl)) != MEM
3988 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3989 abort ();
3990 oldaddr = XEXP (DECL_RTL (decl), 0);
3993 /* Set the alignment we actually gave this decl. */
3994 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3995 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3996 DECL_USER_ALIGN (decl) = 0;
3998 x = assign_temp (decl, 1, 1, 1);
3999 set_mem_attributes (x, decl, 1);
4000 SET_DECL_RTL (decl, x);
4002 if (oldaddr)
4004 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
4005 if (addr != oldaddr)
4006 emit_move_insn (oldaddr, addr);
4009 else
4010 /* Dynamic-size object: must push space on the stack. */
4012 rtx address, size, x;
4014 /* Record the stack pointer on entry to the block, if we have
4015 not already done so. */
4016 do_pending_stack_adjust ();
4017 save_stack_pointer ();
4019 /* In function-at-a-time mode, variable_size doesn't expand this,
4020 so do it now. */
4021 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
4022 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
4023 const0_rtx, VOIDmode, 0);
4025 /* Compute the variable's size, in bytes. */
4026 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
4027 free_temp_slots ();
4029 /* Allocate space on the stack for the variable. Note that
4030 DECL_ALIGN says how the variable is to be aligned and we
4031 cannot use it to conclude anything about the alignment of
4032 the size. */
4033 address = allocate_dynamic_stack_space (size, NULL_RTX,
4034 TYPE_ALIGN (TREE_TYPE (decl)));
4036 /* Reference the variable indirectly through that rtx. */
4037 x = gen_rtx_MEM (DECL_MODE (decl), address);
4038 set_mem_attributes (x, decl, 1);
4039 SET_DECL_RTL (decl, x);
4042 /* Indicate the alignment we actually gave this variable. */
4043 #ifdef STACK_BOUNDARY
4044 DECL_ALIGN (decl) = STACK_BOUNDARY;
4045 #else
4046 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
4047 #endif
4048 DECL_USER_ALIGN (decl) = 0;
4052 /* Emit code to perform the initialization of a declaration DECL. */
4054 void
4055 expand_decl_init (tree decl)
4057 int was_used = TREE_USED (decl);
4059 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
4060 for static decls. */
4061 if (TREE_CODE (decl) == CONST_DECL
4062 || TREE_STATIC (decl))
4063 return;
4065 /* Compute and store the initial value now. */
4067 push_temp_slots ();
4069 if (DECL_INITIAL (decl) == error_mark_node)
4071 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
4073 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
4074 || code == POINTER_TYPE || code == REFERENCE_TYPE)
4075 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
4076 0);
4077 emit_queue ();
4079 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
4081 emit_line_note (DECL_SOURCE_LOCATION (decl));
4082 expand_assignment (decl, DECL_INITIAL (decl), 0);
4083 emit_queue ();
4086 /* Don't let the initialization count as "using" the variable. */
4087 TREE_USED (decl) = was_used;
4089 /* Free any temporaries we made while initializing the decl. */
4090 preserve_temp_slots (NULL_RTX);
4091 free_temp_slots ();
4092 pop_temp_slots ();
4095 /* CLEANUP is an expression to be executed at exit from this binding contour;
4096 for example, in C++, it might call the destructor for this variable.
4098 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4099 CLEANUP multiple times, and have the correct semantics. This
4100 happens in exception handling, and for gotos, returns, and breaks
4101 that leave the current scope.
4103 If CLEANUP is nonzero and DECL is zero, we record a cleanup
4104 that is not associated with any particular variable. */
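/* For example (a C++ sketch, not from the original source): for

     { String s; ... }

   the front end registers a cleanup roughly equivalent to calling
   s.~String (); the UNSAVE_EXPR wrapping lets that one cleanup be
   expanded for the normal fall-through exit and again for each goto,
   return or exception edge that leaves the scope.  */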
4107 expand_decl_cleanup (tree decl, tree cleanup)
4109 struct nesting *thisblock;
4111 /* Error if we are not in any block. */
4112 if (cfun == 0 || block_stack == 0)
4113 return 0;
4115 thisblock = block_stack;
4117 /* Record the cleanup if there is one. */
4119 if (cleanup != 0)
4121 tree t;
4122 rtx seq;
4123 tree *cleanups = &thisblock->data.block.cleanups;
4124 int cond_context = conditional_context ();
4126 if (cond_context)
4128 rtx flag = gen_reg_rtx (word_mode);
4129 rtx set_flag_0;
4130 tree cond;
4132 start_sequence ();
4133 emit_move_insn (flag, const0_rtx);
4134 set_flag_0 = get_insns ();
4135 end_sequence ();
4137 thisblock->data.block.last_unconditional_cleanup
4138 = emit_insn_after (set_flag_0,
4139 thisblock->data.block.last_unconditional_cleanup);
4141 emit_move_insn (flag, const1_rtx);
4143 cond = build_decl (VAR_DECL, NULL_TREE,
4144 (*lang_hooks.types.type_for_mode) (word_mode, 1));
4145 SET_DECL_RTL (cond, flag);
4147 /* Conditionalize the cleanup. */
4148 cleanup = build (COND_EXPR, void_type_node,
4149 (*lang_hooks.truthvalue_conversion) (cond),
4150 cleanup, integer_zero_node);
4151 cleanup = fold (cleanup);
4153 cleanups = &thisblock->data.block.cleanups;
4156 cleanup = unsave_expr (cleanup);
4158 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4160 if (! cond_context)
4161 /* If this block has a cleanup, it belongs in stack_block_stack. */
4162 stack_block_stack = thisblock;
4164 if (cond_context)
4166 start_sequence ();
4169 if (! using_eh_for_cleanups_p)
4170 TREE_ADDRESSABLE (t) = 1;
4171 else
4172 expand_eh_region_start ();
4174 if (cond_context)
4176 seq = get_insns ();
4177 end_sequence ();
4178 if (seq)
4179 thisblock->data.block.last_unconditional_cleanup
4180 = emit_insn_after (seq,
4181 thisblock->data.block.last_unconditional_cleanup);
4183 else
4185 thisblock->data.block.last_unconditional_cleanup
4186 = get_last_insn ();
4187 /* When we insert instructions after the last unconditional cleanup,
4188 we don't adjust last_insn. That means that a later add_insn will
4189 clobber the instructions we've just added. The easiest way to
4190 fix this is to just insert another instruction here, so that the
4191 instructions inserted after the last unconditional cleanup are
4192 never the last instruction. */
4193 emit_note (NOTE_INSN_DELETED);
4196 return 1;
4199 /* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
4200 is thrown. */
4203 expand_decl_cleanup_eh (tree decl, tree cleanup, int eh_only)
4205 int ret = expand_decl_cleanup (decl, cleanup);
4206 if (cleanup && ret)
4208 tree node = block_stack->data.block.cleanups;
4209 CLEANUP_EH_ONLY (node) = eh_only;
4211 return ret;
4214 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4215 DECL_ELTS is the list of elements that belong to DECL's type.
4216 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
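/* For example (a C++ sketch):

     union { int i; double d; };

   declares I and D as aliases for one piece of storage; below, each
   element's DECL_RTL becomes the union's MEM or REG reinterpreted in
   the element's own machine mode.  */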
4218 void
4219 expand_anon_union_decl (tree decl, tree cleanup, tree decl_elts)
4221 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4222 rtx x;
4223 tree t;
4225 /* If any of the elements are addressable, so is the entire union. */
4226 for (t = decl_elts; t; t = TREE_CHAIN (t))
4227 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4229 TREE_ADDRESSABLE (decl) = 1;
4230 break;
4233 expand_decl (decl);
4234 expand_decl_cleanup (decl, cleanup);
4235 x = DECL_RTL (decl);
4237 /* Go through the elements, assigning RTL to each. */
4238 for (t = decl_elts; t; t = TREE_CHAIN (t))
4240 tree decl_elt = TREE_VALUE (t);
4241 tree cleanup_elt = TREE_PURPOSE (t);
4242 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4244 /* If any of the elements are used, so is the entire
4245 union. */
4246 if (TREE_USED (decl_elt))
4247 TREE_USED (decl) = 1;
4249 /* Propagate the union's alignment to the elements. */
4250 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4251 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4253 /* If the element has BLKmode and the union doesn't, the union is
4254 aligned such that the element doesn't need to have BLKmode, so
4255 change the element's mode to the appropriate one for its size. */
4256 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4257 DECL_MODE (decl_elt) = mode
4258 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4260 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4261 instead create a new MEM rtx with the proper mode. */
4262 if (GET_CODE (x) == MEM)
4264 if (mode == GET_MODE (x))
4265 SET_DECL_RTL (decl_elt, x);
4266 else
4267 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
4269 else if (GET_CODE (x) == REG)
4271 if (mode == GET_MODE (x))
4272 SET_DECL_RTL (decl_elt, x);
4273 else
4274 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
4276 else
4277 abort ();
4279 /* Record the cleanup if there is one. */
4281 if (cleanup != 0)
4282 thisblock->data.block.cleanups
4283 = tree_cons (decl_elt, cleanup_elt,
4284 thisblock->data.block.cleanups);
4288 /* Expand a list of cleanups LIST.
4289 Elements may be expressions or may be nested lists.
4291 If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
4292 goto and handle protection regions specially in that case.
4294 If REACHABLE, we emit code; otherwise we just inform the exception
4295 handling code about this finalization. */
4297 static void
4298 expand_cleanups (tree list, int in_fixup, int reachable)
4300 tree tail;
4301 for (tail = list; tail; tail = TREE_CHAIN (tail))
4302 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4303 expand_cleanups (TREE_VALUE (tail), in_fixup, reachable);
4304 else
4306 if (! in_fixup && using_eh_for_cleanups_p)
4307 expand_eh_region_end_cleanup (TREE_VALUE (tail));
4309 if (reachable && !CLEANUP_EH_ONLY (tail))
4311 /* Cleanups may be run multiple times. For example,
4312 when exiting a binding contour, we expand the
4313 cleanups associated with that contour. When a goto
4314 within that binding contour has a target outside that
4315 contour, it will expand all cleanups from its scope to
4316 the target. Though the cleanups are expanded multiple
4317 times, the control paths are non-overlapping so the
4318 cleanups will not be executed twice. */
4320 /* We may need to protect from outer cleanups. */
4321 if (in_fixup && using_eh_for_cleanups_p)
4323 expand_eh_region_start ();
4325 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4327 expand_eh_region_end_fixup (TREE_VALUE (tail));
4329 else
4330 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4332 free_temp_slots ();
4337 /* Mark the context we are emitting RTL for as a conditional
4338 context, so that any cleanup actions we register with
4339 expand_decl_cleanup will be properly conditionalized when those
4340 cleanup actions are later performed. Must be called before any
4341 expression (tree) is expanded that is within a conditional context. */
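/* For example (a C++ sketch): when expanding the true arm of

     p ? process (String ("a")) : 0;

   the temporary's cleanup must run only if that arm was actually
   taken, so the deferral count below makes expand_decl_cleanup emit
   its flag-conditional variant.  */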
4343 void
4344 start_cleanup_deferral (void)
4346 /* block_stack can be NULL if we are inside the parameter list. It is
4347 OK to do nothing, because cleanups aren't possible here. */
4348 if (block_stack)
4349 ++block_stack->data.block.conditional_code;
4352 /* Mark the end of a conditional region of code. Because cleanup
4353 deferrals may be nested, we may still be in a conditional region
4354 after we end the currently deferred cleanups; only after we end all
4355 deferred cleanups are we back in unconditional code. */
4357 void
4358 end_cleanup_deferral (void)
4360 /* block_stack can be NULL if we are inside the parameter list. It is
4361 OK to do nothing, because cleanups aren't possible here. */
4362 if (block_stack)
4363 --block_stack->data.block.conditional_code;
4366 tree
4367 last_cleanup_this_contour (void)
4369 if (block_stack == 0)
4370 return 0;
4372 return block_stack->data.block.cleanups;
4375 /* Return 1 if there are any pending cleanups at this point.
4376 Check the current contour as well as contours that enclose
4377 the current contour. */
4380 any_pending_cleanups (void)
4382 struct nesting *block;
4384 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4385 return 0;
4387 if (block_stack->data.block.cleanups != NULL)
4388 return 1;
4390 if (block_stack->data.block.outer_cleanups == 0)
4391 return 0;
4393 for (block = block_stack->next; block; block = block->next)
4394 if (block->data.block.cleanups != 0)
4395 return 1;
4397 return 0;
4400 /* Enter a case (Pascal) or switch (C) statement.
4401 Push a block onto case_stack and nesting_stack
4402 to accumulate the case-labels that are seen
4403 and to record the labels generated for the statement.
4405 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4406 Otherwise, this construct is transparent for `exit_something'.
4408 EXPR is the index-expression to be dispatched on.
4409 TYPE is its nominal type. We could simply convert EXPR to this type,
4410 but instead we take short cuts. */
4412 void
4413 expand_start_case (int exit_flag, tree expr, tree type,
4414 const char *printname)
4416 struct nesting *thiscase = ALLOC_NESTING ();
4418 /* Make an entry on case_stack for the case we are entering. */
4420 thiscase->desc = CASE_NESTING;
4421 thiscase->next = case_stack;
4422 thiscase->all = nesting_stack;
4423 thiscase->depth = ++nesting_depth;
4424 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4425 thiscase->data.case_stmt.case_list = 0;
4426 thiscase->data.case_stmt.index_expr = expr;
4427 thiscase->data.case_stmt.nominal_type = type;
4428 thiscase->data.case_stmt.default_label = 0;
4429 thiscase->data.case_stmt.printname = printname;
4430 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4431 case_stack = thiscase;
4432 nesting_stack = thiscase;
4434 do_pending_stack_adjust ();
4435 emit_queue ();
4437 /* Make sure case_stmt.start points to something that won't
4438 need any transformation before expand_end_case. */
4439 if (GET_CODE (get_last_insn ()) != NOTE)
4440 emit_note (NOTE_INSN_DELETED);
4442 thiscase->data.case_stmt.start = get_last_insn ();
4444 start_cleanup_deferral ();
4447 /* Start a "dummy case statement" within which case labels are invalid
4448 and are not connected to any larger real case statement.
4449 This can be used if you don't want to let a case statement jump
4450 into the middle of certain kinds of constructs. */
4452 void
4453 expand_start_case_dummy (void)
4455 struct nesting *thiscase = ALLOC_NESTING ();
4457 /* Make an entry on case_stack for the dummy. */
4459 thiscase->desc = CASE_NESTING;
4460 thiscase->next = case_stack;
4461 thiscase->all = nesting_stack;
4462 thiscase->depth = ++nesting_depth;
4463 thiscase->exit_label = 0;
4464 thiscase->data.case_stmt.case_list = 0;
4465 thiscase->data.case_stmt.start = 0;
4466 thiscase->data.case_stmt.nominal_type = 0;
4467 thiscase->data.case_stmt.default_label = 0;
4468 case_stack = thiscase;
4469 nesting_stack = thiscase;
4470 start_cleanup_deferral ();
4473 static void
4474 check_seenlabel (void)
4476 /* If this is the first label, warn if any insns have been emitted. */
4477 if (case_stack->data.case_stmt.line_number_status >= 0)
4479 rtx insn;
4481 restore_line_number_status
4482 (case_stack->data.case_stmt.line_number_status);
4483 case_stack->data.case_stmt.line_number_status = -1;
4485 for (insn = case_stack->data.case_stmt.start;
4486 insn;
4487 insn = NEXT_INSN (insn))
4489 if (GET_CODE (insn) == CODE_LABEL)
4490 break;
4491 if (GET_CODE (insn) != NOTE
4492 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4495 insn = PREV_INSN (insn);
4496 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4498 /* If insn is zero, then there must have been a syntax error. */
4499 if (insn)
4501 location_t locus;
4502 locus.file = NOTE_SOURCE_FILE (insn);
4503 locus.line = NOTE_LINE_NUMBER (insn);
4504 warning ("%Hunreachable code at beginning of %s", &locus,
4505 case_stack->data.case_stmt.printname);
4507 break;
4513 /* Accumulate one case or default label inside a case or switch statement.
4514 VALUE is the value of the case (a null pointer, for a default label).
4515 The function CONVERTER, when applied to arguments T and V,
4516 converts the value V to the type T.
4518 If not currently inside a case or switch statement, return 1 and do
4519 nothing. The caller will print a language-specific error message.
4520 If VALUE is a duplicate or overlaps, return 2 and do nothing
4521 except store the (first) duplicate node in *DUPLICATE.
4522 If VALUE is out of range, return 3 and do nothing.
4523 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4524 Return 0 on success.
4526 Extended to handle range statements. */
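/* A hypothetical caller sketch (the converter function and the
   diagnostics are the front end's own, not defined here):

     tree duplicate;
     switch (pushcase (value, convert_case_value, label, &duplicate))
       {
       case 1: error ("case label not within a switch statement"); break;
       case 2: error ("duplicate case value"); break;
       case 3: error ("case value out of range"); break;
       }  */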
4529 pushcase (tree value, tree (*converter) (tree, tree), tree label,
4530 tree *duplicate)
4532 tree index_type;
4533 tree nominal_type;
4535 /* Fail if not inside a real case statement. */
4536 if (! (case_stack && case_stack->data.case_stmt.start))
4537 return 1;
4539 if (stack_block_stack
4540 && stack_block_stack->depth > case_stack->depth)
4541 return 5;
4543 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4544 nominal_type = case_stack->data.case_stmt.nominal_type;
4546 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4547 if (index_type == error_mark_node)
4548 return 0;
4550 /* Convert VALUE to the type in which the comparisons are nominally done. */
4551 if (value != 0)
4552 value = (*converter) (nominal_type, value);
4554 check_seenlabel ();
4556 /* Fail if this value is out of range for the actual type of the index
4557 (which may be narrower than NOMINAL_TYPE). */
4558 if (value != 0
4559 && (TREE_CONSTANT_OVERFLOW (value)
4560 || ! int_fits_type_p (value, index_type)))
4561 return 3;
4563 return add_case_node (value, value, label, duplicate);
4566 /* Like pushcase but this case applies to all values between VALUE1 and
4567 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4568 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4569 starts at VALUE1 and ends at the highest value of the index type.
4570 If both are NULL, this case applies to all values.
4572 The return value is the same as that of pushcase but there is one
4573 additional error code: 4 means the specified range was empty. */
4576 pushcase_range (tree value1, tree value2, tree (*converter) (tree, tree),
4577 tree label, tree *duplicate)
4579 tree index_type;
4580 tree nominal_type;
4582 /* Fail if not inside a real case statement. */
4583 if (! (case_stack && case_stack->data.case_stmt.start))
4584 return 1;
4586 if (stack_block_stack
4587 && stack_block_stack->depth > case_stack->depth)
4588 return 5;
4590 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4591 nominal_type = case_stack->data.case_stmt.nominal_type;
4593 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4594 if (index_type == error_mark_node)
4595 return 0;
4597 check_seenlabel ();
4599 /* Convert VALUEs to type in which the comparisons are nominally done
4600 and replace any unspecified value with the corresponding bound. */
4601 if (value1 == 0)
4602 value1 = TYPE_MIN_VALUE (index_type);
4603 if (value2 == 0)
4604 value2 = TYPE_MAX_VALUE (index_type);
4606 /* Fail if the range is empty. Do this before any conversion since
4607 we want to allow out-of-range empty ranges. */
4608 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4609 return 4;
4611 /* If the max was unbounded, use the max of the nominal_type we are
4612 converting to. Do this after the < check above to suppress false
4613 positives. */
4614 if (value2 == 0)
4615 value2 = TYPE_MAX_VALUE (nominal_type);
4617 value1 = (*converter) (nominal_type, value1);
4618 value2 = (*converter) (nominal_type, value2);
4620 /* Fail if these values are out of range. */
4621 if (TREE_CONSTANT_OVERFLOW (value1)
4622 || ! int_fits_type_p (value1, index_type))
4623 return 3;
4625 if (TREE_CONSTANT_OVERFLOW (value2)
4626 || ! int_fits_type_p (value2, index_type))
4627 return 3;
4629 return add_case_node (value1, value2, label, duplicate);
4632 /* Do the actual insertion of a case label for pushcase and pushcase_range
4633 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4634 slowdown for large switch statements. */
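/* An illustrative walk-through: with ranges 1...4 and 10...12 already
   in the tree, inserting 5...9 descends right past 1...4 (4 < 5) and
   left past 10...12 (9 < 10) and becomes a new leaf; inserting 3...6
   instead would overlap 1...4, so *DUPLICATE is set and 2 is
   returned.  */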
4637 add_case_node (tree low, tree high, tree label, tree *duplicate)
4639 struct case_node *p, **q, *r;
4641 /* If there's no HIGH value, then this is not a case range; it's
4642 just a simple case label. But that's just a degenerate case
4643 range. */
4644 if (!high)
4645 high = low;
4647 /* Handle default labels specially. */
4648 if (!high && !low)
4650 if (case_stack->data.case_stmt.default_label != 0)
4652 *duplicate = case_stack->data.case_stmt.default_label;
4653 return 2;
4655 case_stack->data.case_stmt.default_label = label;
4656 expand_label (label);
4657 return 0;
4660 q = &case_stack->data.case_stmt.case_list;
4661 p = *q;
4663 while ((r = *q))
4665 p = r;
4667 /* Keep going past elements distinctly greater than HIGH. */
4668 if (tree_int_cst_lt (high, p->low))
4669 q = &p->left;
4671 /* or distinctly less than LOW. */
4672 else if (tree_int_cst_lt (p->high, low))
4673 q = &p->right;
4675 else
4677 /* We have an overlap; this is an error. */
4678 *duplicate = p->code_label;
4679 return 2;
4683 /* Add this label to the chain, and succeed. */
4685 r = ggc_alloc (sizeof (struct case_node));
4686 r->low = low;
4688 /* If the bounds are equal, turn this into the one-value case. */
4689 if (tree_int_cst_equal (low, high))
4690 r->high = r->low;
4691 else
4692 r->high = high;
4694 r->code_label = label;
4695 expand_label (label);
4697 *q = r;
4698 r->parent = p;
4699 r->left = 0;
4700 r->right = 0;
4701 r->balance = 0;
4703 while (p)
4705 struct case_node *s;
4707 if (r == p->left)
4709 int b;
4711 if (! (b = p->balance))
4712 /* Growth propagation from left side. */
4713 p->balance = -1;
4714 else if (b < 0)
4716 if (r->balance < 0)
4718 /* R-Rotation */
4719 if ((p->left = s = r->right))
4720 s->parent = p;
4722 r->right = p;
4723 p->balance = 0;
4724 r->balance = 0;
4725 s = p->parent;
4726 p->parent = r;
4728 if ((r->parent = s))
4730 if (s->left == p)
4731 s->left = r;
4732 else
4733 s->right = r;
4735 else
4736 case_stack->data.case_stmt.case_list = r;
4738 else
4739 /* r->balance == +1 */
4741 /* LR-Rotation */
4743 int b2;
4744 struct case_node *t = r->right;
4746 if ((p->left = s = t->right))
4747 s->parent = p;
4749 t->right = p;
4750 if ((r->right = s = t->left))
4751 s->parent = r;
4753 t->left = r;
4754 b = t->balance;
4755 b2 = b < 0;
4756 p->balance = b2;
4757 b2 = -b2 - b;
4758 r->balance = b2;
4759 t->balance = 0;
4760 s = p->parent;
4761 p->parent = t;
4762 r->parent = t;
4764 if ((t->parent = s))
4766 if (s->left == p)
4767 s->left = t;
4768 else
4769 s->right = t;
4771 else
4772 case_stack->data.case_stmt.case_list = t;
4774 break;
4777 else
4779 /* p->balance == +1; growth of left side balances the node. */
4780 p->balance = 0;
4781 break;
4784 else
4785 /* r == p->right */
4787 int b;
4789 if (! (b = p->balance))
4790 /* Growth propagation from right side. */
4791 p->balance++;
4792 else if (b > 0)
4794 if (r->balance > 0)
4796 /* L-Rotation */
4798 if ((p->right = s = r->left))
4799 s->parent = p;
4801 r->left = p;
4802 p->balance = 0;
4803 r->balance = 0;
4804 s = p->parent;
4805 p->parent = r;
4806 if ((r->parent = s))
4808 if (s->left == p)
4809 s->left = r;
4810 else
4811 s->right = r;
4814 else
4815 case_stack->data.case_stmt.case_list = r;
4818 else
4819 /* r->balance == -1 */
4821 /* RL-Rotation */
4822 int b2;
4823 struct case_node *t = r->left;
4825 if ((p->right = s = t->left))
4826 s->parent = p;
4828 t->left = p;
4830 if ((r->left = s = t->right))
4831 s->parent = r;
4833 t->right = r;
4834 b = t->balance;
4835 b2 = b < 0;
4836 r->balance = b2;
4837 b2 = -b2 - b;
4838 p->balance = b2;
4839 t->balance = 0;
4840 s = p->parent;
4841 p->parent = t;
4842 r->parent = t;
4844 if ((t->parent = s))
4846 if (s->left == p)
4847 s->left = t;
4848 else
4849 s->right = t;
4852 else
4853 case_stack->data.case_stmt.case_list = t;
4855 break;
4857 else
4859 /* p->balance == -1; growth of right side balances the node. */
4860 p->balance = 0;
4861 break;
4865 r = p;
4866 p = p->parent;
4869 return 0;
4872 /* Returns the number of possible values of TYPE.
4873 Returns -1 if the number is unknown, variable, or if the number does not
4874 fit in a HOST_WIDE_INT.
4875 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4876 do not increase monotonically (there may be duplicates);
4877 to 1 if the values increase monotonically, but not always by 1;
4878 otherwise sets it to 0. */
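/* Illustrative examples of that contract:

     enum a { X, Y, Z };        -- values increase by 1
     enum b { P = 0, Q = 4 };   -- increasing, but not by 1
     enum c { M = 1, N = 1 };   -- duplicate values, *SPARSENESS == 2  */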
4880 HOST_WIDE_INT
4881 all_cases_count (tree type, int *sparseness)
4883 tree t;
4884 HOST_WIDE_INT count, minval, lastval;
4886 *sparseness = 0;
4888 switch (TREE_CODE (type))
4890 case BOOLEAN_TYPE:
4891 count = 2;
4892 break;
4894 case CHAR_TYPE:
4895 count = 1 << BITS_PER_UNIT;
4896 break;
4898 default:
4899 case INTEGER_TYPE:
4900 if (TYPE_MAX_VALUE (type) != 0
4901 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4902 TYPE_MIN_VALUE (type))))
4903 && 0 != (t = fold (build (PLUS_EXPR, type, t,
4904 convert (type, integer_one_node))))
4905 && host_integerp (t, 1))
4906 count = tree_low_cst (t, 1);
4907 else
4908 return -1;
4909 break;
4911 case ENUMERAL_TYPE:
4912 /* Don't waste time with enumeral types with huge values. */
4913 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4914 || TYPE_MAX_VALUE (type) == 0
4915 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4916 return -1;
4918 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
4919 count = 0;
4921 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4923 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4925 if (*sparseness == 2 || thisval <= lastval)
4926 *sparseness = 2;
4927 else if (thisval != minval + count)
4928 *sparseness = 1;
4930 lastval = thisval;
4931 count++;
4935 return count;
4938 #define BITARRAY_TEST(ARRAY, INDEX) \
4939 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4940 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4941 #define BITARRAY_SET(ARRAY, INDEX) \
4942 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4943 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
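/* For example, BITARRAY_SET (cases_seen, 9) sets bit 1 of byte 1 of
   the array (with 8-bit chars); BITARRAY_TEST (cases_seen, 9) then
   yields nonzero.  */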
4945 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4946 with the case values we have seen, assuming the case expression
4947 has the given TYPE.
4948 SPARSENESS is as determined by all_cases_count.
4950 The time needed is proportional to COUNT, unless
4951 SPARSENESS is 2, in which case quadratic time is needed. */
4953 void
4954 mark_seen_cases (tree type, unsigned char *cases_seen, HOST_WIDE_INT count,
4955 int sparseness)
4957 tree next_node_to_try = NULL_TREE;
4958 HOST_WIDE_INT next_node_offset = 0;
4960 struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4961 tree val = make_node (INTEGER_CST);
4963 TREE_TYPE (val) = type;
4964 if (! root)
4965 /* Do nothing. */
4967 else if (sparseness == 2)
4969 tree t;
4970 unsigned HOST_WIDE_INT xlo;
4972 /* This less efficient loop is only needed to handle
4973 duplicate case values (multiple enum constants
4974 with the same value). */
4975 TREE_TYPE (val) = TREE_TYPE (root->low);
4976 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4977 t = TREE_CHAIN (t), xlo++)
4979 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4980 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4981 n = root;
4984 /* Keep going past elements distinctly greater than VAL. */
4985 if (tree_int_cst_lt (val, n->low))
4986 n = n->left;
4988 /* or distinctly less than VAL. */
4989 else if (tree_int_cst_lt (n->high, val))
4990 n = n->right;
4992 else
4994 /* We have found a matching range. */
4995 BITARRAY_SET (cases_seen, xlo);
4996 break;
4999 while (n);
5002 else
5004 if (root->left)
5005 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5007 for (n = root; n; n = n->right)
5009 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5010 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5011 while (! tree_int_cst_lt (n->high, val))
5013 /* Calculate (into xlo) the "offset" of the integer (val).
5014 The element with lowest value has offset 0, the next smallest
5015 element has offset 1, etc. */
5017 unsigned HOST_WIDE_INT xlo;
5018 HOST_WIDE_INT xhi;
5019 tree t;
5021 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5023 /* The TYPE_VALUES will be in increasing order, so
5024 start searching where we last ended. */
5025 t = next_node_to_try;
5026 xlo = next_node_offset;
5027 xhi = 0;
5028 for (;;)
5030 if (t == NULL_TREE)
5032 t = TYPE_VALUES (type);
5033 xlo = 0;
5035 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5037 next_node_to_try = TREE_CHAIN (t);
5038 next_node_offset = xlo + 1;
5039 break;
5041 xlo++;
5042 t = TREE_CHAIN (t);
5043 if (t == next_node_to_try)
5045 xlo = -1;
5046 break;
5050 else
5052 t = TYPE_MIN_VALUE (type);
5053 if (t)
5054 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5055 &xlo, &xhi);
5056 else
5057 xlo = xhi = 0;
5058 add_double (xlo, xhi,
5059 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5060 &xlo, &xhi);
5063 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
5064 BITARRAY_SET (cases_seen, xlo);
5066 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5067 1, 0,
5068 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5074 /* Given a switch statement with an expression that is an enumeration
5075 type, warn if any of the enumeration type's literals are not
5076 covered by the case expressions of the switch. Also, warn if there
5077 are any extra switch cases that are *not* elements of the
5078 enumerated type.
5080 Historical note:
5082 At one stage this function would: ``If all enumeration literals
5083 were covered by the case expressions, turn one of the expressions
5084 into the default expression since it should not be possible to fall
5085 through such a switch.''
5087 That code has since been removed as: ``This optimization is
5088 disabled because it causes valid programs to fail. ANSI C does not
5089 guarantee that an expression with enum type will have a value that
5090 is the same as one of the enumeration literals.'' */
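/* For example (illustrative):

     enum color { RED, GREEN, BLUE } c;
     switch (c) { case RED: break; case GREEN: break; }

   draws "enumeration value `BLUE' not handled in switch", while an
   extra "case 42:" would draw "case value `42' not in enumerated
   type `color'".  */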
5092 void
5093 check_for_full_enumeration_handling (tree type)
5095 struct case_node *n;
5096 tree chain;
5098 /* True iff the selector type is a numbered set mode. */
5099 int sparseness = 0;
5101 /* The number of possible selector values. */
5102 HOST_WIDE_INT size;
5104 /* For each possible selector value, a one iff it has been matched
5105 by a case value alternative. */
5106 unsigned char *cases_seen;
5108 /* The allocated size of cases_seen, in chars. */
5109 HOST_WIDE_INT bytes_needed;
5111 size = all_cases_count (type, &sparseness);
5112 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5114 if (size > 0 && size < 600000
5115 /* We deliberately use calloc here, not xcalloc, so that we can suppress
5116 this optimization if we don't have enough memory rather than
5117 aborting, as xmalloc would do. */
5118 && (cases_seen = really_call_calloc (bytes_needed, 1)) != NULL)
5120 HOST_WIDE_INT i;
5121 tree v = TYPE_VALUES (type);
5123 /* The time complexity of this code is normally O(N), where
5124 N is the number of members in the enumerated type.
5125 However, if type is an ENUMERAL_TYPE whose values do not
5126 increase monotonically, O(N*log(N)) time may be needed. */
5128 mark_seen_cases (type, cases_seen, size, sparseness);
5130 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5131 if (BITARRAY_TEST (cases_seen, i) == 0)
5132 warning ("enumeration value `%s' not handled in switch",
5133 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5135 free (cases_seen);
5138 /* Now we go the other way around; we warn if there are case
5139 expressions that don't correspond to enumerators. This can
5140 occur since C and C++ don't enforce type-checking of
5141 assignments to enumeration variables. */
5143 if (case_stack->data.case_stmt.case_list
5144 && case_stack->data.case_stmt.case_list->left)
5145 case_stack->data.case_stmt.case_list
5146 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5147 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5149 for (chain = TYPE_VALUES (type);
5150 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5151 chain = TREE_CHAIN (chain))
5154 if (!chain)
5156 if (TYPE_NAME (type) == 0)
5157 warning ("case value `%ld' not in enumerated type",
5158 (long) TREE_INT_CST_LOW (n->low));
5159 else
5160 warning ("case value `%ld' not in enumerated type `%s'",
5161 (long) TREE_INT_CST_LOW (n->low),
5162 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5163 == IDENTIFIER_NODE)
5164 ? TYPE_NAME (type)
5165 : DECL_NAME (TYPE_NAME (type))));
5167 if (!tree_int_cst_equal (n->low, n->high))
5169 for (chain = TYPE_VALUES (type);
5170 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5171 chain = TREE_CHAIN (chain))
5174 if (!chain)
5176 if (TYPE_NAME (type) == 0)
5177 warning ("case value `%ld' not in enumerated type",
5178 (long) TREE_INT_CST_LOW (n->high));
5179 else
5180 warning ("case value `%ld' not in enumerated type `%s'",
5181 (long) TREE_INT_CST_LOW (n->high),
5182 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5183 == IDENTIFIER_NODE)
5184 ? TYPE_NAME (type)
5185 : DECL_NAME (TYPE_NAME (type))));
5192 /* Maximum number of case bit tests. */
5193 #define MAX_CASE_BIT_TESTS 3
5195 /* By default, enable case bit tests on targets with ashlsi3. */
5196 #ifndef CASE_USE_BIT_TESTS
5197 #define CASE_USE_BIT_TESTS (ashl_optab->handlers[word_mode].insn_code \
5198 != CODE_FOR_nothing)
5199 #endif
5202 /* A case_bit_test represents a set of case nodes that may be
5203 selected from using a bit-wise comparison. HI and LO hold
5204 the integer to be tested against, LABEL contains the label
5205 to jump to upon success and BITS counts the number of case
5206 nodes handled by this test, typically the number of bits
5207 set in HI:LO. */
5209 struct case_bit_test
5211 HOST_WIDE_INT hi;
5212 HOST_WIDE_INT lo;
5213 rtx label;
5214 int bits;
5217 /* Determine whether "1 << x" is relatively cheap in word_mode. */
5219 static bool
5220 lshift_cheap_p (void)
5222 static bool init = false;
5223 static bool cheap = true;
5225 if (!init)
5227 rtx reg = gen_rtx_REG (word_mode, 10000);
5228 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
5229 cheap = cost < COSTS_N_INSNS (3);
5230 init = true;
5233 return cheap;
5236 /* Comparison function for qsort to order bit tests by decreasing
5237 number of case nodes, i.e. the node with the most cases gets
5238 tested first. */
5240 static int
5241 case_bit_test_cmp (const void *p1, const void *p2)
5243 const struct case_bit_test *d1 = p1;
5244 const struct case_bit_test *d2 = p2;
5246 return d2->bits - d1->bits;
5249 /* Expand a switch statement by a short sequence of bit-wise
5250 comparisons. "switch(x)" is effectively converted into
5251 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
5252 integer constants.
5254 INDEX_EXPR is the value being switched on, which is of
5255 type INDEX_TYPE. MINVAL is the lowest case value in
5256 the case nodes, of type INDEX_TYPE, and RANGE is the highest
5257 value minus MINVAL, also of type INDEX_TYPE. NODES is
5258 the set of case nodes, and DEFAULT_LABEL is the label to
5259 branch to should none of the cases match.
5261 There *MUST* be MAX_CASE_BIT_TESTS or fewer unique case
5262 node targets. */
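/* A worked example (illustrative): if cases 1, 3 and 5 share one
   target, MINVAL is 1, RANGE is 4, and the emitted code behaves
   roughly like

     if ((unsigned) (x - 1) > 4)
       goto default_label;
     if (((unsigned) 1 << (x - 1)) & 0x15)
       goto shared_label;
     goto default_label;

   where 0x15 has bits 0, 2 and 4 set, i.e. the offsets of 1, 3 and 5
   from MINVAL.  */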
5264 static void
5265 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
5266 tree range, case_node_ptr nodes, rtx default_label)
5268 struct case_bit_test test[MAX_CASE_BIT_TESTS];
5269 enum machine_mode mode;
5270 rtx expr, index, label;
5271 unsigned int i, j, lo, hi;
5272 struct case_node *n;
5273 unsigned int count;
5275 count = 0;
5276 for (n = nodes; n; n = n->right)
5278 label = label_rtx (n->code_label);
5279 for (i = 0; i < count; i++)
5280 if (same_case_target_p (label, test[i].label))
5281 break;
5283 if (i == count)
5285 if (count >= MAX_CASE_BIT_TESTS)
5286 abort ();
5287 test[i].hi = 0;
5288 test[i].lo = 0;
5289 test[i].label = label;
5290 test[i].bits = 1;
5291 count++;
5293 else
5294 test[i].bits++;
5296 lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5297 n->low, minval)), 1);
5298 hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5299 n->high, minval)), 1);
5300 for (j = lo; j <= hi; j++)
5301 if (j >= HOST_BITS_PER_WIDE_INT)
5302 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_WIDE_INT);
5303 else
5304 test[i].lo |= (HOST_WIDE_INT) 1 << j;
5307 qsort (test, count, sizeof(*test), case_bit_test_cmp);
5309 index_expr = fold (build (MINUS_EXPR, index_type,
5310 convert (index_type, index_expr),
5311 convert (index_type, minval)));
5312 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5313 emit_queue ();
5314 index = protect_from_queue (index, 0);
5315 do_pending_stack_adjust ();
5317 mode = TYPE_MODE (index_type);
5318 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
5319 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
5320 default_label);
5322 index = convert_to_mode (word_mode, index, 0);
5323 index = expand_binop (word_mode, ashl_optab, const1_rtx,
5324 index, NULL_RTX, 1, OPTAB_WIDEN);
5326 for (i = 0; i < count; i++)
5328 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
5329 expr = expand_binop (word_mode, and_optab, index, expr,
5330 NULL_RTX, 1, OPTAB_WIDEN);
5331 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
5332 word_mode, 1, test[i].label);
5335 emit_jump (default_label);
5338 #ifndef HAVE_casesi
5339 #define HAVE_casesi 0
5340 #endif
5342 #ifndef HAVE_tablejump
5343 #define HAVE_tablejump 0
5344 #endif
5346 /* Terminate a case (Pascal) or switch (C) statement
5347 in which ORIG_INDEX is the expression to be tested.
5348 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
5349 type as given in the source before any compiler conversions.
5350 Generate the code to test it and jump to the right place. */
5352 void
5353 expand_end_case_type (tree orig_index, tree orig_type)
5355 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
5356 rtx default_label = 0;
5357 struct case_node *n, *m;
5358 unsigned int count, uniq;
5359 rtx index;
5360 rtx table_label;
5361 int ncases;
5362 rtx *labelvec;
5363 int i;
5364 rtx before_case, end, lab;
5365 struct nesting *thiscase = case_stack;
5366 tree index_expr, index_type;
5367 bool exit_done = false;
5368 int unsignedp;
5370 /* Don't crash due to previous errors. */
5371 if (thiscase == NULL)
5372 return;
5374 index_expr = thiscase->data.case_stmt.index_expr;
5375 index_type = TREE_TYPE (index_expr);
5376 unsignedp = TREE_UNSIGNED (index_type);
5377 if (orig_type == NULL)
5378 orig_type = TREE_TYPE (orig_index);
5380 do_pending_stack_adjust ();
5382 /* This might get a spurious warning in the presence of a syntax error;
5383 it could be fixed by moving the call to check_seenlabel after the
5384 check for error_mark_node, and copying the code of check_seenlabel that
5385 deals with case_stack->data.case_stmt.line_number_status /
5386 restore_line_number_status in front of the call to end_cleanup_deferral.
5387 However, this might miss some useful warnings in the presence of
5388 non-syntax errors. */
5389 check_seenlabel ();
5391 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5392 if (index_type != error_mark_node)
5394 /* If the switch expression was an enumerated type, check that
5395 all enumeration literals are covered by the cases.
5396 The check is made when -Wswitch was specified and there is no
5397 default case, or when -Wswitch-enum was specified. */
5398 if (((warn_switch && !thiscase->data.case_stmt.default_label)
5399 || warn_switch_enum)
5400 && TREE_CODE (orig_type) == ENUMERAL_TYPE
5401 && TREE_CODE (index_expr) != INTEGER_CST)
5402 check_for_full_enumeration_handling (orig_type);
5404 if (warn_switch_default && !thiscase->data.case_stmt.default_label)
5405 warning ("switch missing default case");
5407 /* If we don't have a default-label, create one here,
5408 after the body of the switch. */
5409 if (thiscase->data.case_stmt.default_label == 0)
5411 thiscase->data.case_stmt.default_label
5412 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5413 /* Share the exit label if possible. */
5414 if (thiscase->exit_label)
5416 SET_DECL_RTL (thiscase->data.case_stmt.default_label,
5417 thiscase->exit_label);
5418 exit_done = true;
5420 expand_label (thiscase->data.case_stmt.default_label);
5422 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5424 before_case = get_last_insn ();
5426 if (thiscase->data.case_stmt.case_list
5427 && thiscase->data.case_stmt.case_list->left)
5428 thiscase->data.case_stmt.case_list
5429 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5431 /* Simplify the case-list before we count it. */
5432 group_case_nodes (thiscase->data.case_stmt.case_list);
5433 strip_default_case_nodes (&thiscase->data.case_stmt.case_list,
5434 default_label);
5436 /* Get upper and lower bounds of case values.
5437 Also convert all the case values to the index expr's data type. */
5439 uniq = 0;
5440 count = 0;
5441 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5443 /* Check low and high label values are integers. */
5444 if (TREE_CODE (n->low) != INTEGER_CST)
5445 abort ();
5446 if (TREE_CODE (n->high) != INTEGER_CST)
5447 abort ();
5449 n->low = convert (index_type, n->low);
5450 n->high = convert (index_type, n->high);
5452 /* Count the elements and track the largest and smallest
5453 of them (treating them as signed even if they are not). */
5454 if (count++ == 0)
5456 minval = n->low;
5457 maxval = n->high;
5459 else
5461 if (INT_CST_LT (n->low, minval))
5462 minval = n->low;
5463 if (INT_CST_LT (maxval, n->high))
5464 maxval = n->high;
5466 /* A range counts double, since it requires two compares. */
5467 if (! tree_int_cst_equal (n->low, n->high))
5468 count++;
5470 /* Count the number of unique case node targets. */
5471 uniq++;
5472 lab = label_rtx (n->code_label);
5473 for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
5474 if (same_case_target_p (label_rtx (m->code_label), lab))
5476 uniq--;
5477 break;
5481 /* Compute span of values. */
5482 if (count != 0)
5483 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5485 end_cleanup_deferral ();
5487 if (count == 0)
5489 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5490 emit_queue ();
5491 emit_jump (default_label);
5494 /* Try implementing this switch statement by a short sequence of
5495 bit-wise comparisons. However, we let the binary-tree case
5496 below handle constant index expressions. */
5497 else if (CASE_USE_BIT_TESTS
5498 && ! TREE_CONSTANT (index_expr)
5499 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
5500 && compare_tree_int (range, 0) > 0
5501 && lshift_cheap_p ()
5502 && ((uniq == 1 && count >= 3)
5503 || (uniq == 2 && count >= 5)
5504 || (uniq == 3 && count >= 6)))
5506 /* Optimize the case where all the case values fit in a
5507 word without having to subtract MINVAL. In this case,
5508 we can optimize away the subtraction. */
5509 if (compare_tree_int (minval, 0) > 0
5510 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
5512 minval = integer_zero_node;
5513 range = maxval;
5515 emit_case_bit_tests (index_type, index_expr, minval, range,
5516 thiscase->data.case_stmt.case_list,
5517 default_label);
5520 /* If range of values is much bigger than number of values,
5521 make a sequence of conditional branches instead of a dispatch.
5522 If the switch-index is a constant, do it this way
5523 because we can optimize it. */
5525 else if (count < case_values_threshold ()
5526 || compare_tree_int (range,
5527 (optimize_size ? 3 : 10) * count) > 0
5528 /* RANGE may be signed, and really large ranges will show up
5529 as negative numbers. */
5530 || compare_tree_int (range, 0) < 0
5531 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5532 || flag_pic
5533 #endif
5534 || TREE_CONSTANT (index_expr)
5535 /* If neither casesi nor tablejump is available, we can
5536 only go this way. */
5537 || (!HAVE_casesi && !HAVE_tablejump))
5539 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5541 /* If the index is a short or char for which we do not have
5542 an insn to handle comparisons directly, convert it to
5543 a full integer now, rather than letting each comparison
5544 generate the conversion. */
5546 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5547 && ! have_insn_for (COMPARE, GET_MODE (index)))
5549 enum machine_mode wider_mode;
5550 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5551 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5552 if (have_insn_for (COMPARE, wider_mode))
5554 index = convert_to_mode (wider_mode, index, unsignedp);
5555 break;
5559 emit_queue ();
5560 do_pending_stack_adjust ();
5562 index = protect_from_queue (index, 0);
5563 if (GET_CODE (index) == MEM)
5564 index = copy_to_reg (index);
5565 if (GET_CODE (index) == CONST_INT
5566 || TREE_CODE (index_expr) == INTEGER_CST)
5568 /* Make a tree node with the proper constant value
5569 if we don't already have one. */
5570 if (TREE_CODE (index_expr) != INTEGER_CST)
5572 index_expr
5573 = build_int_2 (INTVAL (index),
5574 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5575 index_expr = convert (index_type, index_expr);
5578 /* For constant index expressions we need only
5579 issue an unconditional branch to the appropriate
5580 target code. The job of removing any unreachable
5581 code is left to the optimization phase if the
5582 "-O" option is specified. */
5583 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5584 if (! tree_int_cst_lt (index_expr, n->low)
5585 && ! tree_int_cst_lt (n->high, index_expr))
5586 break;
5588 if (n)
5589 emit_jump (label_rtx (n->code_label));
5590 else
5591 emit_jump (default_label);
5593 else
5595 /* If the index expression is not constant we generate
5596 a binary decision tree to select the appropriate
5597 target code. This is done as follows:
5599 The list of cases is rearranged into a binary tree,
5600 nearly optimal assuming equal probability for each case.
5602 The tree is transformed into RTL, eliminating
5603 redundant test conditions at the same time.
5605 If program flow could reach the end of the
5606 decision tree, an unconditional jump to the
5607 default code is emitted. */
5609 use_cost_table
5610 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
5611 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5612 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
5613 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5614 default_label, index_type);
5615 emit_jump_if_reachable (default_label);
5618 else
5620 table_label = gen_label_rtx ();
5621 if (! try_casesi (index_type, index_expr, minval, range,
5622 table_label, default_label))
5624 index_type = thiscase->data.case_stmt.nominal_type;
5626 /* Index jumptables from zero for suitable values of
5627 minval to avoid a subtraction. */
5628 if (! optimize_size
5629 && compare_tree_int (minval, 0) > 0
5630 && compare_tree_int (minval, 3) < 0)
5632 minval = integer_zero_node;
5633 range = maxval;
5636 if (! try_tablejump (index_type, index_expr, minval, range,
5637 table_label, default_label))
5638 abort ();
5641 /* Get table of labels to jump to, in order of case index. */
5643 ncases = tree_low_cst (range, 0) + 1;
5644 labelvec = alloca (ncases * sizeof (rtx));
5645 memset (labelvec, 0, ncases * sizeof (rtx));
5647 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5649 /* Compute the low and high bounds relative to the minimum
5650 value since that should fit in a HOST_WIDE_INT while the
5651 actual values may not. */
5652 HOST_WIDE_INT i_low
5653 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5654 n->low, minval)), 1);
5655 HOST_WIDE_INT i_high
5656 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5657 n->high, minval)), 1);
5658 HOST_WIDE_INT i;
5660 for (i = i_low; i <= i_high; i ++)
5661 labelvec[i]
5662 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5665 /* Fill in the gaps with the default. */
5666 for (i = 0; i < ncases; i++)
5667 if (labelvec[i] == 0)
5668 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5670 /* Output the table. */
5671 emit_label (table_label);
5673 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5674 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5675 gen_rtx_LABEL_REF (Pmode, table_label),
5676 gen_rtvec_v (ncases, labelvec),
5677 const0_rtx, const0_rtx));
5678 else
5679 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5680 gen_rtvec_v (ncases, labelvec)));
5682 /* If the case insn drops through the table,
5683 after the table we must jump to the default-label.
5684 Otherwise record no drop-through after the table. */
5685 #ifdef CASE_DROPS_THROUGH
5686 emit_jump (default_label);
5687 #else
5688 emit_barrier ();
5689 #endif
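/* Schematically (label names hypothetical), for

       switch (x) { case 0: ...; case 1: ...; case 3: ...; }

   with MINVAL == 0 and RANGE == 3, the vector built above has
   NCASES == 4 entries:

       table_label: L0, L1, Ldefault, L3

   where the hole at index 2 was filled with DEFAULT_LABEL, and the
   ADDR_DIFF_VEC form stores label-minus-base offsets instead of
   absolute addresses for PC-relative or PIC dispatch. */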
5692 before_case = NEXT_INSN (before_case);
5693 end = get_last_insn ();
5694 if (squeeze_notes (&before_case, &end))
5695 abort ();
5696 reorder_insns (before_case, end,
5697 thiscase->data.case_stmt.start);
5699 else
5700 end_cleanup_deferral ();
5702 if (thiscase->exit_label && !exit_done)
5703 emit_label (thiscase->exit_label);
5705 POPSTACK (case_stack);
5707 free_temp_slots ();
5710 /* Convert the tree NODE into a list linked by the right field, with the left
5711 field zeroed. RIGHT is used for recursion; it is a list to be placed
5712 rightmost in the resulting list. */
5714 static struct case_node *
5715 case_tree2list (struct case_node *node, struct case_node *right)
5717 struct case_node *left;
5719 if (node->right)
5720 right = case_tree2list (node->right, right);
5722 node->right = right;
5723 if ((left = node->left))
5725 node->left = 0;
5726 return case_tree2list (left, node);
5729 return node;
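/* For example, a balanced tree of case nodes

           4
          / \
         2   6
        / \ / \
       1   3 5   7

   is flattened by the recursion above into the ordered list
   1 -> 2 -> 3 -> 4 -> 5 -> 6 -> 7, linked through the RIGHT fields
   with every LEFT field zeroed. */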
5732 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5734 static void
5735 do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
5737 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
5739 if (op1 == op2)
5740 emit_jump (label);
5742 else
5743 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
5744 (GET_MODE (op1) == VOIDmode
5745 ? GET_MODE (op2) : GET_MODE (op1)),
5746 unsignedp, label);
5749 /* Not all case values are encountered equally. This function
5750 uses a heuristic to weight case labels, in cases where that
5751 looks like a reasonable thing to do.
5753 Right now, all we try to guess is text, and we establish the
5754 following weights:
5756 chars above space: 16
5757 digits: 16
5758 default: 12
5759 space, punct: 8
5760 tab: 4
5761 newline: 2
5762 other "\" chars: 1
5763 remaining chars: 0
5765 If we find any cases in the switch that are not either -1 or in the range
5766 of valid ASCII characters, or are control characters other than those
5767 commonly used with "\", don't treat this switch as scanning text.
5769 Return 1 if these nodes are suitable for cost estimation, otherwise
5770 return 0. */
5772 static int
5773 estimate_case_costs (case_node_ptr node)
5775 tree min_ascii = integer_minus_one_node;
5776 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5777 case_node_ptr n;
5778 int i;
5780 /* If we haven't already made the cost table, make it now. Note that the
5781 lower bound of the table is -1, not zero. */
5783 if (! cost_table_initialized)
5785 cost_table_initialized = 1;
5787 for (i = 0; i < 128; i++)
5789 if (ISALNUM (i))
5790 COST_TABLE (i) = 16;
5791 else if (ISPUNCT (i))
5792 COST_TABLE (i) = 8;
5793 else if (ISCNTRL (i))
5794 COST_TABLE (i) = -1;
5797 COST_TABLE (' ') = 8;
5798 COST_TABLE ('\t') = 4;
5799 COST_TABLE ('\0') = 4;
5800 COST_TABLE ('\n') = 2;
5801 COST_TABLE ('\f') = 1;
5802 COST_TABLE ('\v') = 1;
5803 COST_TABLE ('\b') = 1;
5806 /* See if all the case expressions look like text. It is text if the
5807 lowest constant is >= -1 and the highest constant is <= 127. Do all comparisons
5808 as signed arithmetic since we don't want to ever access cost_table with a
5809 value less than -1. Also check that none of the constants in a range
5810 are strange control characters. */
5812 for (n = node; n; n = n->right)
5814 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5815 return 0;
5817 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5818 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5819 if (COST_TABLE (i) < 0)
5820 return 0;
5823 /* All interesting values are within the range of interesting
5824 ASCII characters. */
5825 return 1;
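/* A sketch of the effect: in a switch over characters whose labels
   include 'a' and '\n', the cost table gives 'a' weight 16 and '\n'
   weight 2, so balance_case_nodes below tends to place the
   alphanumeric labels nearer the root of the comparison tree than
   the newline label. */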
5828 /* Determine whether two case labels branch to the same target. */
5830 static bool
5831 same_case_target_p (rtx l1, rtx l2)
5833 rtx i1, i2;
5835 if (l1 == l2)
5836 return true;
5838 i1 = next_real_insn (l1);
5839 i2 = next_real_insn (l2);
5840 if (i1 == i2)
5841 return true;
5843 if (i1 && simplejump_p (i1))
5845 l1 = XEXP (SET_SRC (PATTERN (i1)), 0);
5848 if (i2 && simplejump_p (i2))
5850 l2 = XEXP (SET_SRC (PATTERN (i2)), 0);
5852 return l1 == l2;
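/* For example, with

       case 1: goto common;
       case 2: goto common;

   each case label is followed by a simple jump to COMMON, so both
   resolve to the same target above; adjacent labels, as in
   "case 1: case 2: body;", share their first real insn and are
   caught by the I1 == I2 test instead. */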
5855 /* Delete nodes that branch to the default label from a list of
5856 case nodes. Eg. case 5: default: becomes just default: */
5858 static void
5859 strip_default_case_nodes (case_node_ptr *prev, rtx deflab)
5861 case_node_ptr ptr;
5863 while (*prev)
5865 ptr = *prev;
5866 if (same_case_target_p (label_rtx (ptr->code_label), deflab))
5867 *prev = ptr->right;
5868 else
5869 prev = &ptr->right;
5873 /* Scan an ordered list of case nodes
5874 combining those with consecutive values or ranges.
5876 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
5878 static void
5879 group_case_nodes (case_node_ptr head)
5881 case_node_ptr node = head;
5883 while (node)
5885 rtx lab = label_rtx (node->code_label);
5886 case_node_ptr np = node;
5888 /* Try to group the successors of NODE with NODE. */
5889 while (((np = np->right) != 0)
5890 /* Do they jump to the same place? */
5891 && same_case_target_p (label_rtx (np->code_label), lab)
5892 /* Are their ranges consecutive? */
5893 && tree_int_cst_equal (np->low,
5894 fold (build (PLUS_EXPR,
5895 TREE_TYPE (node->high),
5896 node->high,
5897 integer_one_node)))
5898 /* An overflow is not consecutive. */
5899 && tree_int_cst_lt (node->high,
5900 fold (build (PLUS_EXPR,
5901 TREE_TYPE (node->high),
5902 node->high,
5903 integer_one_node))))
5905 node->high = np->high;
5907 /* NP is the first node after NODE which can't be grouped with it.
5908 Delete the nodes in between, and move on to that node. */
5909 node->right = np;
5910 node = np;
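/* The final test above guards against wrap-around: if NODE->high is
   the maximum of its type (say 255 for an unsigned char index), then
   NODE->high + 1 folds to an overflowed value and the equality test
   alone would be meaningless, so such a node is never grouped with
   its successor. */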
5914 /* Take an ordered list of case nodes
5915 and transform them into a near optimal binary tree,
5916 on the assumption that any target code selection value is as
5917 likely as any other.
5919 The transformation is performed by splitting the ordered
5920 list into two equal sections plus a pivot. The parts are
5921 then attached to the pivot as left and right branches. Each
5922 branch is then transformed recursively. */
5924 static void
5925 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
5927 case_node_ptr np;
5929 np = *head;
5930 if (np)
5932 int cost = 0;
5933 int i = 0;
5934 int ranges = 0;
5935 case_node_ptr *npp;
5936 case_node_ptr left;
5938 /* Count the number of entries on this branch. Also count the ranges. */
5940 while (np)
5942 if (!tree_int_cst_equal (np->low, np->high))
5944 ranges++;
5945 if (use_cost_table)
5946 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
5949 if (use_cost_table)
5950 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
5952 i++;
5953 np = np->right;
5956 if (i > 2)
5958 /* Split this list if it is long enough for that to help. */
5959 npp = head;
5960 left = *npp;
5961 if (use_cost_table)
5963 /* Find the place in the list that bisects the list's total cost;
5964 here I gets half the total cost. */
5965 int n_moved = 0;
5966 i = (cost + 1) / 2;
5967 while (1)
5969 /* Skip nodes while their cost does not reach that amount. */
5970 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5971 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5972 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
5973 if (i <= 0)
5974 break;
5975 npp = &(*npp)->right;
5976 n_moved += 1;
5978 if (n_moved == 0)
5980 /* Leave this branch lopsided, but optimize left-hand
5981 side and fill in `parent' fields for right-hand side. */
5982 np = *head;
5983 np->parent = parent;
5984 balance_case_nodes (&np->left, np);
5985 for (; np->right; np = np->right)
5986 np->right->parent = np;
5987 return;
5990 /* If there are just three nodes, split at the middle one. */
5991 else if (i == 3)
5992 npp = &(*npp)->right;
5993 else
5995 /* Find the place in the list that bisects the list's total cost,
5996 where ranges count as 2.
5997 Here I gets half the total cost. */
5998 i = (i + ranges + 1) / 2;
5999 while (1)
6001 /* Skip nodes while their cost does not reach that amount. */
6002 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
6003 i--;
6004 i--;
6005 if (i <= 0)
6006 break;
6007 npp = &(*npp)->right;
6010 *head = np = *npp;
6011 *npp = 0;
6012 np->parent = parent;
6013 np->left = left;
6015 /* Optimize each of the two split parts. */
6016 balance_case_nodes (&np->left, np);
6017 balance_case_nodes (&np->right, np);
6019 else
6021 /* Else leave this branch as one level,
6022 but fill in `parent' fields. */
6023 np = *head;
6024 np->parent = parent;
6025 for (; np->right; np = np->right)
6026 np->right->parent = np;
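/* As a sketch with uniform costs, the ordered chain

       1 -> 2 -> 3 -> 4 -> 5 -> 6 -> 7

   is split at the pivot 4 and recursively rebuilt as

             4
            / \
           2   6
          / \ / \
         1   3 5   7

   so emit_case_nodes below needs roughly three levels of
   comparisons to reach any label, instead of walking the whole
   chain. */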
6031 /* Search the parent sections of the case node tree
6032 to see if a test for the lower bound of NODE would be redundant.
6033 INDEX_TYPE is the type of the index expression.
6035 The instructions to generate the case decision tree are
6036 output in the same order as nodes are processed so it is
6037 known that if a parent node checks the range of the current
6038 node minus one, then the current node is bounded at its lower
6039 span. Thus the test would be redundant. */
6041 static int
6042 node_has_low_bound (case_node_ptr node, tree index_type)
6044 tree low_minus_one;
6045 case_node_ptr pnode;
6047 /* If the lower bound of this node is the lowest value in the index type,
6048 we need not test it. */
6050 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
6051 return 1;
6053 /* If this node has a left branch, the value at the left must be less
6054 than that at this node, so it cannot be bounded at the bottom and
6055 we need not bother testing any further. */
6057 if (node->left)
6058 return 0;
6060 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
6061 node->low, integer_one_node));
6063 /* If the subtraction above overflowed, we can't verify anything.
6064 Otherwise, look for a parent that tests our value - 1. */
6066 if (! tree_int_cst_lt (low_minus_one, node->low))
6067 return 0;
6069 for (pnode = node->parent; pnode; pnode = pnode->parent)
6070 if (tree_int_cst_equal (low_minus_one, pnode->high))
6071 return 1;
6073 return 0;
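/* Concretely: if some parent already tested "index <= 4" and control
   reached this node on the high side, a node with LOW == 5 finds
   5 - 1 == 4 equal to that parent's HIGH, so its lower-bound check
   is redundant and is omitted. node_has_high_bound below is the
   mirror image of this test. */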
6076 /* Search the parent sections of the case node tree
6077 to see if a test for the upper bound of NODE would be redundant.
6078 INDEX_TYPE is the type of the index expression.
6080 The instructions to generate the case decision tree are
6081 output in the same order as nodes are processed so it is
6082 known that if a parent node checks the range of the current
6083 node plus one, then the current node is bounded at its upper
6084 span. Thus the test would be redundant. */
6086 static int
6087 node_has_high_bound (case_node_ptr node, tree index_type)
6089 tree high_plus_one;
6090 case_node_ptr pnode;
6092 /* If there is no upper bound, obviously no test is needed. */
6094 if (TYPE_MAX_VALUE (index_type) == NULL)
6095 return 1;
6097 /* If the upper bound of this node is the highest value in the type
6098 of the index expression, we need not test against it. */
6100 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
6101 return 1;
6103 /* If this node has a right branch, the value at the right must be greater
6104 than that at this node, so it cannot be bounded at the top and
6105 we need not bother testing any further. */
6107 if (node->right)
6108 return 0;
6110 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
6111 node->high, integer_one_node));
6113 /* If the addition above overflowed, we can't verify anything.
6114 Otherwise, look for a parent that tests our value + 1. */
6116 if (! tree_int_cst_lt (node->high, high_plus_one))
6117 return 0;
6119 for (pnode = node->parent; pnode; pnode = pnode->parent)
6120 if (tree_int_cst_equal (high_plus_one, pnode->low))
6121 return 1;
6123 return 0;
6126 /* Search the parent sections of the
6127 case node tree to see if both tests for the upper and lower
6128 bounds of NODE would be redundant. */
6130 static int
6131 node_is_bounded (case_node_ptr node, tree index_type)
6133 return (node_has_low_bound (node, index_type)
6134 && node_has_high_bound (node, index_type));
6137 /* Emit an unconditional jump to LABEL unless it would be dead code. */
6139 static void
6140 emit_jump_if_reachable (rtx label)
6142 if (GET_CODE (get_last_insn ()) != BARRIER)
6143 emit_jump (label);
6146 /* Emit step-by-step code to select a case for the value of INDEX.
6147 The decision tree generated here follows the form of the
6148 case-node binary tree NODE, whose nodes represent test conditions.
6149 INDEX_TYPE is the type of the index of the switch.
6151 Care is taken to prune redundant tests from the decision tree
6152 by detecting any boundary conditions already checked by
6153 emitted rtx. (See node_has_high_bound, node_has_low_bound
6154 and node_is_bounded, above.)
6156 Where the test conditions can be shown to be redundant we emit
6157 an unconditional jump to the target code. As a further
6158 optimization, the subordinates of a tree node are examined to
6159 check for bounded nodes. In this case, conditional and/or
6160 unconditional jumps resulting from the boundary check for the
6161 current node are arranged to target the subordinates' associated
6162 code for out-of-bound conditions on the current node.
6164 We can assume that when control reaches the code generated here,
6165 the index value has already been compared with the parents
6166 of this node, and determined to be on the same side of each parent
6167 as this node is. Thus, if this node tests for the value 51,
6168 and a parent tested for 52, we don't need to consider
6169 the possibility of a value greater than 51. If another parent
6170 tests for the value 50, then this node need not test anything. */
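/* A sketch of the code emitted for the balanced tree rooted at 4
   from the balance_case_nodes example (all nodes single-valued,
   neither subtree bounded; label names hypothetical):

       if (index == 4) goto L4;
       if (index > 4) goto test_right;
       if (index == 2) goto L2;
       ...                         (rest of left subtree, then default)
     test_right:
       if (index == 6) goto L6;
       ...                         (rest of right subtree)

   When a subtree is bounded, the corresponding branch goes straight
   to that subtree's label instead of to a test label. */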
6172 static void
6173 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
6174 tree index_type)
6176 /* If INDEX has an unsigned type, we must make unsigned branches. */
6177 int unsignedp = TREE_UNSIGNED (index_type);
6178 enum machine_mode mode = GET_MODE (index);
6179 enum machine_mode imode = TYPE_MODE (index_type);
6181 /* See if our parents have already tested everything for us.
6182 If they have, emit an unconditional jump for this node. */
6183 if (node_is_bounded (node, index_type))
6184 emit_jump (label_rtx (node->code_label));
6186 else if (tree_int_cst_equal (node->low, node->high))
6188 /* Node is single valued. First see if the index expression matches
6189 this node and then check our children, if any. */
6191 do_jump_if_equal (index,
6192 convert_modes (mode, imode,
6193 expand_expr (node->low, NULL_RTX,
6194 VOIDmode, 0),
6195 unsignedp),
6196 label_rtx (node->code_label), unsignedp);
6198 if (node->right != 0 && node->left != 0)
6200 /* This node has children on both sides.
6201 Dispatch to one side or the other
6202 by comparing the index value with this node's value.
6203 If one subtree is bounded, check that one first,
6204 so we can avoid real branches in the tree. */
6206 if (node_is_bounded (node->right, index_type))
6208 emit_cmp_and_jump_insns (index,
6209 convert_modes
6210 (mode, imode,
6211 expand_expr (node->high, NULL_RTX,
6212 VOIDmode, 0),
6213 unsignedp),
6214 GT, NULL_RTX, mode, unsignedp,
6215 label_rtx (node->right->code_label));
6216 emit_case_nodes (index, node->left, default_label, index_type);
6219 else if (node_is_bounded (node->left, index_type))
6221 emit_cmp_and_jump_insns (index,
6222 convert_modes
6223 (mode, imode,
6224 expand_expr (node->high, NULL_RTX,
6225 VOIDmode, 0),
6226 unsignedp),
6227 LT, NULL_RTX, mode, unsignedp,
6228 label_rtx (node->left->code_label));
6229 emit_case_nodes (index, node->right, default_label, index_type);
6232 else
6234 /* Neither node is bounded. First distinguish the two sides;
6235 then emit the code for one side at a time. */
6237 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6239 /* See if the value is on the right. */
6240 emit_cmp_and_jump_insns (index,
6241 convert_modes
6242 (mode, imode,
6243 expand_expr (node->high, NULL_RTX,
6244 VOIDmode, 0),
6245 unsignedp),
6246 GT, NULL_RTX, mode, unsignedp,
6247 label_rtx (test_label));
6249 /* Value must be on the left.
6250 Handle the left-hand subtree. */
6251 emit_case_nodes (index, node->left, default_label, index_type);
6252 /* If left-hand subtree does nothing,
6253 go to default. */
6254 emit_jump_if_reachable (default_label);
6256 /* Code branches here for the right-hand subtree. */
6257 expand_label (test_label);
6258 emit_case_nodes (index, node->right, default_label, index_type);
6262 else if (node->right != 0 && node->left == 0)
6264 /* Here we have a right child but no left so we issue a conditional
6265 branch to default and process the right child.
6267 Omit the conditional branch to default if it would only guard one
6268 right child; it costs too much space to save so little time. */
6270 if (node->right->right || node->right->left
6271 || !tree_int_cst_equal (node->right->low, node->right->high))
6273 if (!node_has_low_bound (node, index_type))
6275 emit_cmp_and_jump_insns (index,
6276 convert_modes
6277 (mode, imode,
6278 expand_expr (node->high, NULL_RTX,
6279 VOIDmode, 0),
6280 unsignedp),
6281 LT, NULL_RTX, mode, unsignedp,
6282 default_label);
6285 emit_case_nodes (index, node->right, default_label, index_type);
6287 else
6288 /* We cannot process node->right normally
6289 since we haven't ruled out the numbers less than
6290 this node's value. So handle node->right explicitly. */
6291 do_jump_if_equal (index,
6292 convert_modes
6293 (mode, imode,
6294 expand_expr (node->right->low, NULL_RTX,
6295 VOIDmode, 0),
6296 unsignedp),
6297 label_rtx (node->right->code_label), unsignedp);
6300 else if (node->right == 0 && node->left != 0)
6302 /* Just one subtree, on the left. */
6303 if (node->left->left || node->left->right
6304 || !tree_int_cst_equal (node->left->low, node->left->high))
6306 if (!node_has_high_bound (node, index_type))
6308 emit_cmp_and_jump_insns (index,
6309 convert_modes
6310 (mode, imode,
6311 expand_expr (node->high, NULL_RTX,
6312 VOIDmode, 0),
6313 unsignedp),
6314 GT, NULL_RTX, mode, unsignedp,
6315 default_label);
6318 emit_case_nodes (index, node->left, default_label, index_type);
6320 else
6321 /* We cannot process node->left normally
6322 since we haven't ruled out the numbers greater than
6323 this node's value. So handle node->left explicitly. */
6324 do_jump_if_equal (index,
6325 convert_modes
6326 (mode, imode,
6327 expand_expr (node->left->low, NULL_RTX,
6328 VOIDmode, 0),
6329 unsignedp),
6330 label_rtx (node->left->code_label), unsignedp);
6333 else
6335 /* Node is a range. These cases are very similar to those for a single
6336 value, except that we do not start by testing whether this node
6337 is the one to branch to. */
6339 if (node->right != 0 && node->left != 0)
6341 /* Node has subtrees on both sides.
6342 If the right-hand subtree is bounded,
6343 test for it first, since we can go straight there.
6344 Otherwise, we need to make a branch in the control structure,
6345 then handle the two subtrees. */
6346 tree test_label = 0;
6348 if (node_is_bounded (node->right, index_type))
6349 /* Right hand node is fully bounded so we can eliminate any
6350 testing and branch directly to the target code. */
6351 emit_cmp_and_jump_insns (index,
6352 convert_modes
6353 (mode, imode,
6354 expand_expr (node->high, NULL_RTX,
6355 VOIDmode, 0),
6356 unsignedp),
6357 GT, NULL_RTX, mode, unsignedp,
6358 label_rtx (node->right->code_label));
6359 else
6361 /* Right hand node requires testing.
6362 Branch to a label where we will handle it later. */
6364 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6365 emit_cmp_and_jump_insns (index,
6366 convert_modes
6367 (mode, imode,
6368 expand_expr (node->high, NULL_RTX,
6369 VOIDmode, 0),
6370 unsignedp),
6371 GT, NULL_RTX, mode, unsignedp,
6372 label_rtx (test_label));
6375 /* Value belongs to this node or to the left-hand subtree. */
6377 emit_cmp_and_jump_insns (index,
6378 convert_modes
6379 (mode, imode,
6380 expand_expr (node->low, NULL_RTX,
6381 VOIDmode, 0),
6382 unsignedp),
6383 GE, NULL_RTX, mode, unsignedp,
6384 label_rtx (node->code_label));
6386 /* Handle the left-hand subtree. */
6387 emit_case_nodes (index, node->left, default_label, index_type);
6389 /* If right node had to be handled later, do that now. */
6391 if (test_label)
6393 /* If the left-hand subtree fell through,
6394 don't let it fall into the right-hand subtree. */
6395 emit_jump_if_reachable (default_label);
6397 expand_label (test_label);
6398 emit_case_nodes (index, node->right, default_label, index_type);
6402 else if (node->right != 0 && node->left == 0)
6404 /* Deal with values to the left of this node,
6405 if they are possible. */
6406 if (!node_has_low_bound (node, index_type))
6408 emit_cmp_and_jump_insns (index,
6409 convert_modes
6410 (mode, imode,
6411 expand_expr (node->low, NULL_RTX,
6412 VOIDmode, 0),
6413 unsignedp),
6414 LT, NULL_RTX, mode, unsignedp,
6415 default_label);
6418 /* Value belongs to this node or to the right-hand subtree. */
6420 emit_cmp_and_jump_insns (index,
6421 convert_modes
6422 (mode, imode,
6423 expand_expr (node->high, NULL_RTX,
6424 VOIDmode, 0),
6425 unsignedp),
6426 LE, NULL_RTX, mode, unsignedp,
6427 label_rtx (node->code_label));
6429 emit_case_nodes (index, node->right, default_label, index_type);
6432 else if (node->right == 0 && node->left != 0)
6434 /* Deal with values to the right of this node,
6435 if they are possible. */
6436 if (!node_has_high_bound (node, index_type))
6438 emit_cmp_and_jump_insns (index,
6439 convert_modes
6440 (mode, imode,
6441 expand_expr (node->high, NULL_RTX,
6442 VOIDmode, 0),
6443 unsignedp),
6444 GT, NULL_RTX, mode, unsignedp,
6445 default_label);
6448 /* Value belongs to this node or to the left-hand subtree. */
6450 emit_cmp_and_jump_insns (index,
6451 convert_modes
6452 (mode, imode,
6453 expand_expr (node->low, NULL_RTX,
6454 VOIDmode, 0),
6455 unsignedp),
6456 GE, NULL_RTX, mode, unsignedp,
6457 label_rtx (node->code_label));
6459 emit_case_nodes (index, node->left, default_label, index_type);
6462 else
6464 /* Node has no children so we check low and high bounds to remove
6465 redundant tests. At most one of the bounds can be known,
6466 since otherwise this node is bounded--a case tested already. */
6467 int high_bound = node_has_high_bound (node, index_type);
6468 int low_bound = node_has_low_bound (node, index_type);
6470 if (!high_bound && low_bound)
6472 emit_cmp_and_jump_insns (index,
6473 convert_modes
6474 (mode, imode,
6475 expand_expr (node->high, NULL_RTX,
6476 VOIDmode, 0),
6477 unsignedp),
6478 GT, NULL_RTX, mode, unsignedp,
6479 default_label);
6482 else if (!low_bound && high_bound)
6484 emit_cmp_and_jump_insns (index,
6485 convert_modes
6486 (mode, imode,
6487 expand_expr (node->low, NULL_RTX,
6488 VOIDmode, 0),
6489 unsignedp),
6490 LT, NULL_RTX, mode, unsignedp,
6491 default_label);
6493 else if (!low_bound && !high_bound)
6495 /* Widen LOW and HIGH to the same width as INDEX. */
6496 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
6497 tree low = build1 (CONVERT_EXPR, type, node->low);
6498 tree high = build1 (CONVERT_EXPR, type, node->high);
6499 rtx low_rtx, new_index, new_bound;
6501 /* Instead of doing two branches, emit one unsigned branch for
6502 (index-low) > (high-low). */
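/* E.g. for a leaf range "case 3 ... 7:", LOW == 3 and HIGH == 7; the
   single test (unsigned) (index - 3) > 4 sends every index outside
   3..7 to DEFAULT_LABEL, because values below 3 wrap around to large
   unsigned numbers. */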
6503 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
6504 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
6505 NULL_RTX, unsignedp,
6506 OPTAB_WIDEN);
6507 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
6508 high, low)),
6509 NULL_RTX, mode, 0);
6511 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
6512 mode, 1, default_label);
6515 emit_jump (label_rtx (node->code_label));
6520 #include "gt-stmt.h"