/* Expands front end tree to back end RTL for GCC
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
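/* For illustration (an added sketch, not part of the original source):
   for a statement like `if (x) y = 1;' a front end would make roughly
   this sequence of calls while parsing:

     expand_start_cond (cond, 0);   // emit test of COND, jump if false
     ... expand the then-clause ...
     expand_end_cond ();            // emit the label the false-jump targets
*/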
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "langhooks.h"
#include "predict.h"
#include "optabs.h"
#include "target.h"
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
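/* For illustration (added sketch): a dense switch such as

     switch (c) { case 0: f (); break; case 1: g (); break; case 2: h (); }

   would normally be emitted as a branch table indexed by C, while a
   sparse one such as

     switch (c) { case 1: f (); break; case 1000: g (); break; }

   becomes a balanced tree of compare-and-jump insns built from these
   case nodes.  */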
struct case_node GTY(())
{
  struct case_node *left;    /* Left son in binary tree */
  struct case_node *right;   /* Right son in binary tree; also node chain */
  struct case_node *parent;  /* Parent of node in binary tree */
  tree low;                  /* Lowest index value for this label */
  tree high;                 /* Highest index value for this label */
  tree code_label;           /* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
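/* So, for example, COST_TABLE (-1) expands to cost_table_[0] and
   COST_TABLE (127) to cost_table_[128]; the 129 entries cover the
   case values -1 .. 127 without ever using a negative index.  */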
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
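/* For illustration (added sketch): while expanding

     while (p) { if (q) break; }

   `nesting_stack' holds the nesting object of the `if' on top of that of
   the `while', chained through `all'; `loop_stack' holds only the loop
   and `cond_stack' only the conditional.  The `break' jumps to the
   `exit_label' of the innermost construct that made itself visible.  */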
struct nesting GTY(())
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  enum nesting_desc {
    COND_NESTING,
    LOOP_NESTING,
    BLOCK_NESTING,
    CASE_NESTING
  } desc;
  union nesting_u
    {
      /* For conds (if-then and if-then-else statements).  */
      struct nesting_cond
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} GTY ((tag ("COND_NESTING"))) cond;
      /* For loops.  */
      struct nesting_loop
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} GTY ((tag ("LOOP_NESTING"))) loop;
      /* For variable binding contours.  */
      struct nesting_block
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block, if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	} GTY ((tag ("BLOCK_NESTING"))) block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct nesting_case
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} GTY ((tag ("CASE_NESTING"))) case_stmt;
    } GTY ((desc ("%1.desc"))) data;
};
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() ggc_alloc (sizeof (struct nesting))
/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all; }			\
     while (this != target); } while (0)
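/* Usage sketch (added): an `expand_end_WHATEVER' function typically ends
   with a call such as

     POPSTACK (loop_stack);

   which unwinds `nesting_stack' down through the innermost loop while
   keeping all of the per-construct stacks consistent.  */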
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
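/* For illustration (added sketch): a fixup arises from code such as

     goto out;          // forward goto; `out' not yet defined
     {
       char buf[n];     // variable-sized object: block resets the stack
       ...
     }
   out:;

   The jump insn is emitted immediately; the stack-restoring insns are
   inserted at the fixup point once `out' has been defined.  */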
struct goto_fixup GTY(())
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain GTY(())
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
struct stmt_status GTY(())
{
  /* Chain of all pending binding contours.  */
  struct nesting * x_block_stack;

  /* If any new stacks are added here, add them to POPSTACKS too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting * x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting * x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting * x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting * x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting * x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;
  rtx x_last_expr_alt_rtl;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Location of last line-number note, whether we actually
     emitted it or not.  */
  location_t x_emit_locus;

  struct goto_fixup *x_goto_fixup_chain;
};
#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define loop_stack (cfun->stmt->x_loop_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
#define last_expr_alt_rtl (cfun->stmt->x_last_expr_alt_rtl)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_locus (cfun->stmt->x_emit_locus)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)

/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;
static int n_occurrences (int, const char *);
static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
static void expand_goto_internal (tree, rtx, rtx);
static int expand_fixup (tree, rtx, rtx);
static rtx expand_nl_handler_label (rtx, rtx);
static void expand_nl_goto_receiver (void);
static void expand_nl_goto_receivers (struct nesting *);
static void fixup_gotos (struct nesting *, rtx, tree, rtx, int);
static bool check_operand_nalternatives (tree, tree);
static bool check_unique_operand_names (tree, tree);
static char *resolve_operand_name_1 (char *, tree, tree);
static void expand_null_return_1 (rtx);
static enum br_predictor return_prediction (rtx);
static rtx shift_return_value (rtx);
static void expand_value_return (rtx);
static int tail_recursion_args (tree, tree);
static void expand_cleanups (tree, int, int);
static void check_seenlabel (void);
static void do_jump_if_equal (rtx, rtx, rtx, int);
static int estimate_case_costs (case_node_ptr);
static bool same_case_target_p (rtx, rtx);
static void strip_default_case_nodes (case_node_ptr *, rtx);
static bool lshift_cheap_p (void);
static int case_bit_test_cmp (const void *, const void *);
static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
static void group_case_nodes (case_node_ptr);
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
static void emit_jump_if_reachable (rtx);
static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
static struct case_node *case_tree2list (case_node *, case_node *);
void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt_for_function (void)
{
  cfun->stmt = ggc_alloc_cleared (sizeof (struct stmt_status));
}
/* Record the current file and line.  Called from emit_line_note.  */

void
set_file_and_line_for_stmt (location_t location)
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    emit_locus = location;
}
/* Emit a no-op instruction.  */

void
emit_nop (void)
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (tree label)
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (!DECL_RTL_SET_P (label))
    SET_DECL_RTL (label, gen_label_rtx ());

  return DECL_RTL (label);
}
/* As above, but also put it on the forced-reference list of the
   function that contains it.  */
rtx
force_label_rtx (tree label)
{
  rtx ref = label_rtx (label);
  tree function = decl_function_context (label);
  struct function *p;

  if (!function)
    abort ();

  if (function != current_function_decl
      && function != inline_function_decl)
    p = find_function_data (function);
  else
    p = cfun;

  p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
						p->expr->x_forced_labels);
  return ref;
}
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (rtx label)
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (tree exp)
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

  x = convert_memory_address (Pmode, x);

  emit_queue ();

  if (! cfun->computed_goto_common_label)
    {
      cfun->computed_goto_common_reg = copy_to_mode_reg (Pmode, x);
      cfun->computed_goto_common_label = gen_label_rtx ();

      do_pending_stack_adjust ();
      emit_label (cfun->computed_goto_common_label);
      emit_indirect_jump (cfun->computed_goto_common_reg);

      current_function_has_computed_jump = 1;
    }
  else
    {
      emit_move_insn (cfun->computed_goto_common_reg, x);
      emit_jump (cfun->computed_goto_common_label);
    }
}
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (tree label)
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = ggc_alloc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (tree label)
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (tree label)
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx handler_slot, static_chain, save_area, insn;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->x_nonlocal_goto_handler_slots;
      for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
	   link = TREE_CHAIN (link))
	handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

      static_chain = copy_to_reg (lookup_static_chain (label));

      /* Get addr of containing function's current nonlocal goto handler,
	 which will do any cleanups and then jump to the label.  */
      handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
					       virtual_stack_vars_rtx,
					       static_chain));

      /* Get addr of containing function's nonlocal save area.  */
      save_area = p->x_nonlocal_goto_stack_level;
      if (save_area)
	save_area = replace_rtx (copy_rtx (save_area),
				 virtual_stack_vars_rtx, static_chain);

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
				      save_area, label_ref));
      else
#endif
	{
	  emit_insn (gen_rtx_CLOBBER (VOIDmode,
				      gen_rtx_MEM (BLKmode,
						   gen_rtx_SCRATCH (VOIDmode))));
	  emit_insn (gen_rtx_CLOBBER (VOIDmode,
				      gen_rtx_MEM (BLKmode,
						   hard_frame_pointer_rtx)));

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, static_chain);
	  emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);

	  /* USE of hard_frame_pointer_rtx added for consistency;
	     not clear if really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (handler_slot);
	}

      /* Search backwards to the jump insn and mark it as a
	 non-local goto.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	{
	  if (GET_CODE (insn) == JUMP_INSN)
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
						  const0_rtx, REG_NOTES (insn));
	      break;
	    }
	  else if (GET_CODE (insn) == CALL_INSN)
	    break;
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
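/* For illustration (added GNU C sketch): the nonlocal branch above handles
   a goto from a nested function to a label of its containing function:

     void f (void)
     {
       __label__ out;
       void g (void) { goto out; }
       g ();
     out:;
     }

   Expanding `goto out' inside G locates F's handler slot, restores F's
   frame and stack pointers, and jumps indirectly to the handler.  */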
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (tree body, rtx label, rtx last_insn)
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();

	  /* Don't do this adjust if it's to the end label and this function
	     is to return with a depressed stack pointer.  */
	  if (label == return_label
	      && (((TREE_CODE (TREE_TYPE (current_function_decl))
		    == FUNCTION_TYPE)
		   && (TYPE_RETURNS_STACK_DEPRESSED
		       (TREE_TYPE (current_function_decl))))))
	    ;
	  else
	    emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree tree_label, rtx rtl_label, rtx last_insn)
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup = ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes (including expand_end_loop)
	 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
	 as a placeholder.  */

      {
	rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();
	rtx start;
	rtx end;
	tree block;

	block = make_node (BLOCK);
	TREE_USED (block) = 1;

	if (!cfun->x_whole_function_mode_p)
	  (*lang_hooks.decls.insert_block) (block);
	else
	  {
	    BLOCK_CHAIN (block)
	      = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
	    BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
	      = block;
	  }

	start_sequence ();
	start = emit_note (NOTE_INSN_BLOCK_BEG);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (start) = block;
	fixup->before_jump = emit_note (NOTE_INSN_DELETED);
	end = emit_note (NOTE_INSN_BLOCK_END);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (end) = block;
	fixup->context = block;
	end_sequence ();
	emit_insn_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (rtx first_insn)
{
  fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN positive means report error if there is a jump into this
   contour from before the beginning of the contour.  This is also done if
   STACK_LEVEL is nonzero unless DONT_JUMP_IN is negative.  */

static void
fixup_gotos (struct nesting *thisblock, rtx stack_level,
	     tree cleanup_list, rtx first_insn, int dont_jump_in)
{
  struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  rtx cleanup_insns;

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  This code used to use
	     the first non-label insn after f->target_rtl, but that's
	     wrong since such can be added, by things like put_var_into_stack
	     and have INSN_UIDs that are out of the range of the block.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in > 0 || (dont_jump_in == 0 && stack_level)
		  || cleanup_list)
	      && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error ("%Jlabel '%D' used before containing binding contour",
		     f->target, f->target);
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  (*lang_hooks.decls.pushlevel) (0);
	  (*lang_hooks.decls.set_block) (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level
	      && ! (f->target_rtl == return_label
		    && ((TREE_CODE (TREE_TYPE (current_function_decl))
			 == FUNCTION_TYPE)
			&& (TYPE_RETURNS_STACK_DEPRESSED
			    (TREE_TYPE (current_function_decl))))))
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  (*lang_hooks.decls.poplevel) (1, 0, 0);

	  end_sequence ();
	  emit_insn_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      (*lang_hooks.decls.pushlevel) (0);
	      (*lang_hooks.decls.set_block) (f->context);
	      expand_cleanups (TREE_VALUE (lists), 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      (*lang_hooks.decls.poplevel) (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insn_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

void
expand_asm (tree string, int vol)
{
  rtx body;

  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));

  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);

  clear_last_expr ();
}
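/* For illustration (added): this is the path taken for a bare asm with no
   operands, e.g.

     asm volatile ("nop");

   which arrives here with VOL == 1 and becomes a single volatile
   ASM_INPUT insn that later passes will not delete.  */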
/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */

bool
parse_output_constraint (const char **constraint_p, int operand_num,
			 int ninputs, int noutputs, bool *allows_mem,
			 bool *allows_reg, bool *is_inout)
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks `='");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
	warning ("output constraint `%c' for operand %d is not at the beginning",
		 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
	 it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string.  */
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
    switch (*p)
      {
      case '+':
      case '=':
	error ("operand constraint contains incorrectly positioned '+' or '='");
	return false;

      case '%':
	if (operand_num + 1 == ninputs + noutputs)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
	error ("matching constraint not valid in output operand");
	return false;

      case '<':  case '>':
	/* ??? Before flow, auto inc/dec insns are not supposed to exist,
	   excepting those that expand_call created.  So match memory
	   and hope.  */
	*allows_mem = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      default:
	if (!ISALPHA (*p))
	  break;
	if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}
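/* For example (added): given the constraint "+m", the code above rewrites
   *CONSTRAINT_P to "=m" and sets *IS_INOUT and *ALLOWS_MEM; given "=&r",
   the string is already canonical and only *ALLOWS_REG is set, since `&'
   and the other modifiers are passed over by the loop.  */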
/* Similar, but for input constraints.  */

bool
parse_input_constraint (const char **constraint_p, int input_num,
			int ninputs, int noutputs, int ninout,
			const char * const * constraints,
			bool *allows_mem, bool *allows_reg)
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;
  bool saw_match = false;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
	if (constraint == orig_constraint)
	  {
	    error ("input operand constraint contains `%c'", constraint[j]);
	    return false;
	  }
	break;

      case '%':
	if (constraint == orig_constraint
	    && input_num + 1 == ninputs - ninout)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

	/* Whether or not a numeric constraint allows a register is
	   decided by the matching constraint, and so there is no need
	   to do anything special with them.  We must handle them in
	   the default case, so that we don't unnecessarily force
	   operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
	{
	  char *end;
	  unsigned long match;

	  saw_match = true;

	  match = strtoul (constraint + j, &end, 10);
	  if (match >= (unsigned long) noutputs)
	    {
	      error ("matching constraint references invalid operand number");
	      return false;
	    }

	  /* Try and find the real constraint for this dup.  Only do this
	     if the matching constraint is the only alternative.  */
	  if (*end == '\0'
	      && (j == 0 || (j == 1 && constraint[0] == '%')))
	    {
	      constraint = constraints[match];
	      *constraint_p = constraint;
	      c_len = strlen (constraint);
	      j = 0;
	      /* ??? At the end of the loop, we will skip the first part of
		 the matched constraint.  This assumes not only that the
		 other constraint is an output constraint, but also that
		 the '=' or '+' come first.  */
	      break;
	    }
	  else
	    j = end - constraint;
	  /* Anticipate increment at end of loop.  */
	  j--;
	}
	/* Fall through.  */

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      default:
	if (! ISALPHA (constraint[j]))
	  {
	    error ("invalid punctuation `%c' in constraint", constraint[j]);
	    return false;
	  }
	if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
	    != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  if (saw_match && !*allows_reg)
    warning ("matching constraint does not allow a register");

  return true;
}
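/* For example (added): in

     asm ("..." : "=r" (x) : "0" (y));

   the input constraint "0" names output operand 0, so the code above
   resolves it to that operand's constraint "=r" and sets *ALLOWS_REG;
   the warning fires only when the matched constraint allows no
   register at all.  */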
/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in DECL.  Emit error and return TRUE for error,
   FALSE for ok.  */

static bool
decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
      && DECL_REGISTER (decl)
      && REG_P (DECL_RTL (decl))
      && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
    {
      rtx reg = DECL_RTL (decl);
      unsigned int regno;

      for (regno = REGNO (reg);
	   regno < (REGNO (reg)
		    + HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)));
	   regno++)
	if (TEST_HARD_REG_BIT (clobbered_regs, regno))
	  {
	    error ("asm-specifier for variable `%s' conflicts with asm clobber list",
		   IDENTIFIER_POINTER (DECL_NAME (decl)));

	    /* Reset registerness to stop multiple errors emitted for a
	       single variable.  */
	    DECL_REGISTER (decl) = 0;
	    return true;
	  }
    }
  return false;
}
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
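/* For illustration (added sketch): for GNU C such as

     asm volatile ("add %1,%0" : "+r" (x) : "g" (y) : "cc");

   the front end calls this function with OUTPUTS holding the "+r" lvalue
   X, INPUTS holding the "g" expression Y, CLOBBERS listing "cc", and VOL
   nonzero.  */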
void
expand_asm_operands (tree string, tree outputs, tree inputs,
		     tree clobbers, int vol, location_t locus)
{
  rtvec argvec, constraintvec;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout;
  int nclobbers;
  HARD_REG_SET clobbered_regs;
  int clobber_conflict_found = 0;
  tree tail;
  tree t;
  int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = alloca (noutputs * sizeof (rtx));
  int *inout_opnum = alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = alloca (noutputs * sizeof (enum machine_mode));
  const char **constraints
    = alloca ((noutputs + ninputs) * sizeof (const char *));
  int old_generating_concat_p = generating_concat_p;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (! check_operand_nalternatives (outputs, inputs))
    return;

  string = resolve_asm_operand_names (string, outputs, inputs);

  /* Collect constraints.  */
  i = 0;
  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
  for (t = inputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));

#ifdef MD_ASM_CLOBBERS
  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  MD_ASM_CLOBBERS (clobbers);
#endif

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  CLEAR_HARD_REG_SET (clobbered_regs);
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));

      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);

      /* Mark clobbered registers.  */
      if (i >= 0)
	{
	  /* Clobbering the PIC register is an error.  */
	  if (i == (int) PIC_OFFSET_TABLE_REGNUM)
	    {
	      error ("PIC register `%s' clobbered in `asm'", regname);
	      return;
	    }

	  SET_HARD_REG_BIT (clobbered_regs, i);
	}
    }

  clear_last_expr ();
  /* First pass over inputs and outputs checks validity and sets
     mark_addressable if needed.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      const char *constraint;
      bool is_inout;
      bool allows_reg;
      bool allows_mem;

      /* If there's an erroneous arg, emit no insn.  */
      if (type == error_mark_node)
	return;

      /* Try to parse the output constraint.  If that fails, there's
	 no point in going further.  */
      constraint = constraints[i];
      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
				    &allows_mem, &allows_reg, &is_inout))
	return;

      if (! allows_reg
	  && (allows_mem
	      || is_inout
	      || (DECL_P (val)
		  && GET_CODE (DECL_RTL (val)) == REG
		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
	(*lang_hooks.mark_addressable) (val);

      if (is_inout)
	ninout++;
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
    {
      bool allows_reg, allows_mem;
      const char *constraint;

      /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
	 would get VOIDmode and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
				    constraints, &allows_mem, &allows_reg))
	return;

      if (! allows_reg && allows_mem)
	(*lang_hooks.mark_addressable) (TREE_VALUE (tail));
    }
  /* Second pass evaluates arguments.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      bool is_inout;
      bool allows_reg;
      bool allows_mem;
      rtx op;

      if (!parse_output_constraint (&constraints[i], i, ninputs,
				    noutputs, &allows_mem, &allows_reg,
				    &is_inout))
	abort ();

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
	   && allows_mem)
	  || (DECL_P (val)
	      && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || is_inout)
	{
	  op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
	  if (GET_CODE (op) == MEM)
	    op = validize_mem (op);

	  if (! allows_reg && GET_CODE (op) != MEM)
	    error ("output number %d not directly addressable", i);
	  if ((! allows_mem && GET_CODE (op) == MEM)
	      || GET_CODE (op) == CONCAT)
	    {
	      real_output_rtx[i] = protect_from_queue (op, 1);
	      op = gen_reg_rtx (GET_MODE (op));
	      if (is_inout)
		emit_move_insn (op, real_output_rtx[i]);
	    }
	}
      else
	{
	  op = assign_temp (type, 0, 0, 1);
	  op = validize_mem (op);
	  TREE_VALUE (tail) = make_tree (type, op);
	}
      output_rtx[i] = op;

      generating_concat_p = old_generating_concat_p;

      if (is_inout)
	{
	  inout_mode[ninout] = TYPE_MODE (type);
	  inout_opnum[ninout++] = i;
	}

      if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
	clobber_conflict_found = 1;
    }
1660 /* Make vectors for the expression-rtx, constraint strings,
1661 and named operands. */
1663 argvec = rtvec_alloc (ninputs);
1664 constraintvec = rtvec_alloc (ninputs);
1666 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1667 : GET_MODE (output_rtx[0])),
1668 TREE_STRING_POINTER (string),
1669 empty_string, 0, argvec, constraintvec,
1670 locus.file, locus.line);
1672 MEM_VOLATILE_P (body) = vol;
1674 /* Eval the inputs and put them into ARGVEC.
1675 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1677 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1679 bool allows_reg, allows_mem;
1680 const char *constraint;
1681 tree val, type;
1682 rtx op;
1684 constraint = constraints[i + noutputs];
1685 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1686 constraints, &allows_mem, &allows_reg))
1687 abort ();
1689 generating_concat_p = 0;
1691 val = TREE_VALUE (tail);
1692 type = TREE_TYPE (val);
1693 op = expand_expr (val, NULL_RTX, VOIDmode,
1694 (allows_mem && !allows_reg
1695 ? EXPAND_MEMORY : EXPAND_NORMAL));
1697 /* Never pass a CONCAT to an ASM. */
1698 if (GET_CODE (op) == CONCAT)
1699 op = force_reg (GET_MODE (op), op);
1700 else if (GET_CODE (op) == MEM)
1701 op = validize_mem (op);
1703 if (asm_operand_ok (op, constraint) <= 0)
1705 if (allows_reg)
1706 op = force_reg (TYPE_MODE (type), op);
1707 else if (!allows_mem)
1708 warning ("asm operand %d probably doesn't match constraints",
1709 i + noutputs);
1710 else if (GET_CODE (op) == MEM)
1712 /* We won't recognize either volatile memory or memory
1713 with a queued address as an available memory_operand
1714 at this point. Ignore it: clearly this *is* a memory. */
1716 else
1718 warning ("use of memory input without lvalue in "
1719 "asm operand %d is deprecated", i + noutputs);
1721 if (CONSTANT_P (op))
1723 rtx mem = force_const_mem (TYPE_MODE (type), op);
1724 if (mem)
1725 op = validize_mem (mem);
1726 else
1727 op = force_reg (TYPE_MODE (type), op);
1729 if (GET_CODE (op) == REG
1730 || GET_CODE (op) == SUBREG
1731 || GET_CODE (op) == ADDRESSOF
1732 || GET_CODE (op) == CONCAT)
1734 tree qual_type = build_qualified_type (type,
1735 (TYPE_QUALS (type)
1736 | TYPE_QUAL_CONST));
1737 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1738 memloc = validize_mem (memloc);
1739 emit_move_insn (memloc, op);
1740 op = memloc;
1745 generating_concat_p = old_generating_concat_p;
1746 ASM_OPERANDS_INPUT (body, i) = op;
1748 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1749 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1751 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1752 clobber_conflict_found = 1;
1755 /* Protect all the operands from the queue now that they have all been
1756 evaluated. */
1758 generating_concat_p = 0;
1760 for (i = 0; i < ninputs - ninout; i++)
1761 ASM_OPERANDS_INPUT (body, i)
1762 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1764 for (i = 0; i < noutputs; i++)
1765 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1767 /* For in-out operands, copy output rtx to input rtx. */
1768 for (i = 0; i < ninout; i++)
1770 int j = inout_opnum[i];
1771 char buffer[16];
1773 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1774 = output_rtx[j];
1776 sprintf (buffer, "%d", j);
1777 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1778 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
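/* For illustration: an in-out operand such as

     asm ("incl %0" : "+r" (x));

   is represented at this point as output 0 with an "=r" constraint
   plus one extra input whose constraint is the matching digit "0"
   built just above, so the asm both reads and writes the same
   location.  */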
1781 generating_concat_p = old_generating_concat_p;
1783 /* Now, for each output, construct an rtx
1784 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1785 ARGVEC CONSTRAINTS OPNAMES))
1786 If there is more than one, put them inside a PARALLEL. */
1788 if (noutputs == 1 && nclobbers == 0)
1790 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1791 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1794 else if (noutputs == 0 && nclobbers == 0)
1796 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1797 emit_insn (body);
1800 else
1802 rtx obody = body;
1803 int num = noutputs;
1805 if (num == 0)
1806 num = 1;
1808 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1810 /* For each output operand, store a SET. */
1811 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1813 XVECEXP (body, 0, i)
1814 = gen_rtx_SET (VOIDmode,
1815 output_rtx[i],
1816 gen_rtx_ASM_OPERANDS
1817 (GET_MODE (output_rtx[i]),
1818 TREE_STRING_POINTER (string),
1819 constraints[i], i, argvec, constraintvec,
1820 locus.file, locus.line));
1822 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1825 /* If there are no outputs (but there are some clobbers)
1826 store the bare ASM_OPERANDS into the PARALLEL. */
1828 if (i == 0)
1829 XVECEXP (body, 0, i++) = obody;
1831 /* Store (clobber REG) for each clobbered register specified. */
1833 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1835 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1836 int j = decode_reg_name (regname);
1837 rtx clobbered_reg;
1839 if (j < 0)
1841 if (j == -3) /* `cc', which is not a register */
1842 continue;
1844 if (j == -4) /* `memory', don't cache memory across asm */
1846 XVECEXP (body, 0, i++)
1847 = gen_rtx_CLOBBER (VOIDmode,
1848 gen_rtx_MEM
1849 (BLKmode,
1850 gen_rtx_SCRATCH (VOIDmode)));
1851 continue;
1854 /* Ignore unknown register, error already signaled. */
1855 continue;
1858 /* Use QImode since that's guaranteed to clobber just one reg. */
1859 clobbered_reg = gen_rtx_REG (QImode, j);
1861 /* Sanity-check for overlap between the clobbers and, respectively,
1862 the inputs and outputs, where it hasn't been handled.  Such overlap
1863 should have been detected and reported above.  */
1864 if (!clobber_conflict_found)
1866 int opno;
1868 /* We test the old body (obody) contents to avoid tripping
1869 over the under-construction body. */
1870 for (opno = 0; opno < noutputs; opno++)
1871 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1872 internal_error ("asm clobber conflict with output operand");
1874 for (opno = 0; opno < ninputs - ninout; opno++)
1875 if (reg_overlap_mentioned_p (clobbered_reg,
1876 ASM_OPERANDS_INPUT (obody, opno)))
1877 internal_error ("asm clobber conflict with input operand");
1880 XVECEXP (body, 0, i++)
1881 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1884 emit_insn (body);
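/* For illustration, the PARALLEL built above for something like

     asm volatile ("..." : "=r" (x) : : "memory");

   looks roughly like

     (parallel [(set (reg x) (asm_operands ...))
                (clobber (mem:BLK (scratch)))])

   with the exact shape depending on the target.  */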
1887 /* For any outputs that needed reloading into registers, spill them
1888 back to where they belong. */
1889 for (i = 0; i < noutputs; ++i)
1890 if (real_output_rtx[i])
1891 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1893 free_temp_slots ();
1896 /* A subroutine of expand_asm_operands. Check that all operands have
1897 the same number of alternatives. Return true if so. */
1899 static bool
1900 check_operand_nalternatives (tree outputs, tree inputs)
1902 if (outputs || inputs)
1904 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1905 int nalternatives
1906 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1907 tree next = inputs;
1909 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1911 error ("too many alternatives in `asm'");
1912 return false;
1915 tmp = outputs;
1916 while (tmp)
1918 const char *constraint
1919 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1921 if (n_occurrences (',', constraint) != nalternatives)
1923 error ("operand constraints for `asm' differ in number of alternatives");
1924 return false;
1927 if (TREE_CHAIN (tmp))
1928 tmp = TREE_CHAIN (tmp);
1929 else
1930 tmp = next, next = 0;
1934 return true;
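/* For example, the constraint "r,m" contains one comma and so
   describes two alternatives; every other operand of the same asm
   must then also supply exactly two alternatives, or the error
   above is given.  */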
1937 /* A subroutine of expand_asm_operands. Check that all operand names
1938 are unique. Return true if so. We rely on the fact that these names
1939 are identifiers, and so have been canonicalized by get_identifier,
1940 so all we need are pointer comparisons. */
1942 static bool
1943 check_unique_operand_names (tree outputs, tree inputs)
1945 tree i, j;
1947 for (i = outputs; i ; i = TREE_CHAIN (i))
1949 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1950 if (! i_name)
1951 continue;
1953 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1954 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1955 goto failure;
1958 for (i = inputs; i ; i = TREE_CHAIN (i))
1960 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1961 if (! i_name)
1962 continue;
1964 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1965 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1966 goto failure;
1967 for (j = outputs; j ; j = TREE_CHAIN (j))
1968 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1969 goto failure;
1972 return true;
1974 failure:
1975 error ("duplicate asm operand name '%s'",
1976 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1977 return false;
1980 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1981 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1982 STRING and in the constraints to those numbers. */
1984 tree
1985 resolve_asm_operand_names (tree string, tree outputs, tree inputs)
1987 char *buffer;
1988 char *p;
1989 const char *c;
1990 tree t;
1992 check_unique_operand_names (outputs, inputs);
1994 /* Substitute [<name>] in input constraint strings. There should be no
1995 named operands in output constraints. */
1996 for (t = inputs; t ; t = TREE_CHAIN (t))
1998 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1999 if (strchr (c, '[') != NULL)
2001 p = buffer = xstrdup (c);
2002 while ((p = strchr (p, '[')) != NULL)
2003 p = resolve_operand_name_1 (p, outputs, inputs);
2004 TREE_VALUE (TREE_PURPOSE (t))
2005 = build_string (strlen (buffer), buffer);
2006 free (buffer);
2010 /* Now check for any needed substitutions in the template. */
2011 c = TREE_STRING_POINTER (string);
2012 while ((c = strchr (c, '%')) != NULL)
2014 if (c[1] == '[')
2015 break;
2016 else if (ISALPHA (c[1]) && c[2] == '[')
2017 break;
2018 else
2020 c += 1;
2021 continue;
2025 if (c)
2027 /* OK, we need to make a copy so we can perform the substitutions.
2028 Assume that we will not need extra space--we get to remove '['
2029 and ']', which means we cannot have a problem until we have more
2030 than 999 operands. */
2031 buffer = xstrdup (TREE_STRING_POINTER (string));
2032 p = buffer + (c - TREE_STRING_POINTER (string));
2034 while ((p = strchr (p, '%')) != NULL)
2036 if (p[1] == '[')
2037 p += 1;
2038 else if (ISALPHA (p[1]) && p[2] == '[')
2039 p += 2;
2040 else
2042 p += 1;
2043 continue;
2046 p = resolve_operand_name_1 (p, outputs, inputs);
2049 string = build_string (strlen (buffer), buffer);
2050 free (buffer);
2053 return string;
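/* A worked example: given

     asm ("mov %[in], %[out]" : [out] "=r" (x) : [in] "r" (y));

   outputs are numbered before inputs, so [out] is operand 0 and
   [in] is operand 1, and the template is rewritten to
   "mov %1, %0".  */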
2056 /* A subroutine of resolve_operand_names. P points to the '[' for a
2057 potential named operand of the form [<name>]. In place, replace
2058 the name and brackets with a number. Return a pointer to the
2059 balance of the string after substitution. */
2061 static char *
2062 resolve_operand_name_1 (char *p, tree outputs, tree inputs)
2064 char *q;
2065 int op;
2066 tree t;
2067 size_t len;
2069 /* Collect the operand name. */
2070 q = strchr (p, ']');
2071 if (!q)
2073 error ("missing close brace for named operand");
2074 return strchr (p, '\0');
2076 len = q - p - 1;
2078 /* Resolve the name to a number. */
2079 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2081 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2082 if (name)
2084 const char *c = TREE_STRING_POINTER (name);
2085 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2086 goto found;
2089 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2091 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2092 if (name)
2094 const char *c = TREE_STRING_POINTER (name);
2095 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2096 goto found;
2100 *q = '\0';
2101 error ("undefined named operand '%s'", p + 1);
2102 op = 0;
2103 found:
2105 /* Replace the name with the number. Unfortunately, not all libraries
2106 get the return value of sprintf correct, so search for the end of the
2107 generated string by hand. */
2108 sprintf (p, "%d", op);
2109 p = strchr (p, '\0');
2111 /* Verify the no extra buffer space assumption. */
2112 if (p > q)
2113 abort ();
2115 /* Shift the rest of the buffer down to fill the gap. */
2116 memmove (p, q + 1, strlen (q + 1) + 1);
2118 return p;
2121 /* Generate RTL to evaluate the expression EXP
2122 and remember it in case this is the VALUE in a ({... VALUE; }) construct.
2123 Provided just for backward-compatibility. expand_expr_stmt_value()
2124 should be used for new code. */
2126 void
2127 expand_expr_stmt (tree exp)
2129 expand_expr_stmt_value (exp, -1, 1);
2132 /* Generate RTL to evaluate the expression EXP. WANT_VALUE tells
2133 whether to (1) save the value of the expression, (0) discard it or
2134 (-1) use expr_stmts_for_value to tell. The use of -1 is
2135 deprecated, and retained only for backward compatibility. */
2137 void
2138 expand_expr_stmt_value (tree exp, int want_value, int maybe_last)
2140 rtx value;
2141 tree type;
2142 rtx alt_rtl = NULL;
2144 if (want_value == -1)
2145 want_value = expr_stmts_for_value != 0;
2147 /* If -Wextra, warn about statements with no side effects,
2148 except for an explicit cast to void (e.g. for assert()), and
2149 except for last statement in ({...}) where they may be useful. */
2150 if (! want_value
2151 && (expr_stmts_for_value == 0 || ! maybe_last)
2152 && exp != error_mark_node
2153 && warn_unused_value)
2155 if (TREE_SIDE_EFFECTS (exp))
2156 warn_if_unused_value (exp);
2157 else if (!VOID_TYPE_P (TREE_TYPE (exp)))
2158 warning ("%Hstatement with no effect", &emit_locus);
2161 /* If EXP is of function type and we are expanding statements for
2162 value, convert it to pointer-to-function. */
2163 if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2164 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2166 /* The call to `expand_expr' could cause last_expr_type and
2167 last_expr_value to get reset. Therefore, we set last_expr_value
2168 and last_expr_type *after* calling expand_expr. */
2169 value = expand_expr_real (exp, want_value ? NULL_RTX : const0_rtx,
2170 VOIDmode, 0, &alt_rtl);
2171 type = TREE_TYPE (exp);
2173 /* If all we do is reference a volatile value in memory,
2174 copy it to a register to be sure it is actually touched. */
2175 if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
2177 if (TYPE_MODE (type) == VOIDmode)
2179 else if (TYPE_MODE (type) != BLKmode)
2180 value = copy_to_reg (value);
2181 else
2183 rtx lab = gen_label_rtx ();
2185 /* Compare the value with itself to reference it. */
2186 emit_cmp_and_jump_insns (value, value, EQ,
2187 expand_expr (TYPE_SIZE (type),
2188 NULL_RTX, VOIDmode, 0),
2189 BLKmode, 0, lab);
2190 emit_label (lab);
2194 /* If this expression is part of a ({...}) and is in memory, we may have
2195 to preserve temporaries. */
2196 preserve_temp_slots (value);
2198 /* Free any temporaries used to evaluate this expression. Any temporary
2199 used as a result of this expression will already have been preserved
2200 above. */
2201 free_temp_slots ();
2203 if (want_value)
2205 last_expr_value = value;
2206 last_expr_alt_rtl = alt_rtl;
2207 last_expr_type = type;
2210 emit_queue ();
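/* As an illustration of the volatile handling above, a statement
   such as

     *(volatile int *) p;

   expands to a load of the value into a pseudo register, so the
   memory access really happens even though the value is unused.  */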
2213 /* Warn if EXP contains any computations whose results are not used.
2214 Return 1 if a warning is printed; 0 otherwise. */
2216 int
2217 warn_if_unused_value (tree exp)
2219 if (TREE_USED (exp))
2220 return 0;
2222 /* Don't warn about void constructs. This includes casting to void,
2223 void function calls, and statement expressions with a final cast
2224 to void. */
2225 if (VOID_TYPE_P (TREE_TYPE (exp)))
2226 return 0;
2228 switch (TREE_CODE (exp))
2230 case PREINCREMENT_EXPR:
2231 case POSTINCREMENT_EXPR:
2232 case PREDECREMENT_EXPR:
2233 case POSTDECREMENT_EXPR:
2234 case MODIFY_EXPR:
2235 case INIT_EXPR:
2236 case TARGET_EXPR:
2237 case CALL_EXPR:
2238 case RTL_EXPR:
2239 case TRY_CATCH_EXPR:
2240 case WITH_CLEANUP_EXPR:
2241 case EXIT_EXPR:
2242 return 0;
2244 case BIND_EXPR:
2245 /* For a binding, warn if no side effect within it. */
2246 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2248 case SAVE_EXPR:
2249 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2251 case TRUTH_ORIF_EXPR:
2252 case TRUTH_ANDIF_EXPR:
2253 /* In && or ||, warn if 2nd operand has no side effect. */
2254 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2256 case COMPOUND_EXPR:
2257 if (TREE_NO_UNUSED_WARNING (exp))
2258 return 0;
2259 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2260 return 1;
2261 /* Let people do `(foo (), 0)' without a warning. */
2262 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2263 return 0;
2264 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2266 case NOP_EXPR:
2267 case CONVERT_EXPR:
2268 case NON_LVALUE_EXPR:
2269 /* Don't warn about conversions not explicit in the user's program. */
2270 if (TREE_NO_UNUSED_WARNING (exp))
2271 return 0;
2272 /* Assignment to a cast usually results in a cast of a modify.
2273 Don't complain about that. There can be an arbitrary number of
2274 casts before the modify, so we must loop until we find the first
2275 non-cast expression and then test to see if that is a modify. */
2277 tree tem = TREE_OPERAND (exp, 0);
2279 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2280 tem = TREE_OPERAND (tem, 0);
2282 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2283 || TREE_CODE (tem) == CALL_EXPR)
2284 return 0;
2286 goto maybe_warn;
2288 case INDIRECT_REF:
2289 /* Don't warn about automatic dereferencing of references, since
2290 the user cannot control it. */
2291 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2292 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2293 /* Fall through. */
2295 default:
2296 /* Referencing a volatile value is a side effect, so don't warn. */
2297 if ((DECL_P (exp)
2298 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2299 && TREE_THIS_VOLATILE (exp))
2300 return 0;
2302 /* If this is an expression which has no operands, there is no value
2303 to be unused. There are no such language-independent codes,
2304 but front ends may define such. */
2305 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2306 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2307 return 0;
2309 maybe_warn:
2310 /* If this is an expression with side effects, don't warn. */
2311 if (TREE_SIDE_EFFECTS (exp))
2312 return 0;
2314 warning ("%Hvalue computed is not used", &emit_locus);
2315 return 1;
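/* Informal examples: `x + 1;' warns, since the sum is computed and
   discarded; `(void) (x + 1);' does not, because of the explicit
   cast to void; and `y = f ();' does not, since the MODIFY_EXPR is
   itself the desired effect.  */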
2319 /* Clear out the memory of the last expression evaluated. */
2321 void
2322 clear_last_expr (void)
2324 last_expr_type = NULL_TREE;
2325 last_expr_value = NULL_RTX;
2326 last_expr_alt_rtl = NULL_RTX;
2329 /* Begin a statement-expression, i.e., a series of statements which
2330 may return a value. Return the RTL_EXPR for this statement expr.
2331 The caller must save that value and pass it to
2332 expand_end_stmt_expr. If HAS_SCOPE is nonzero, temporaries created
2333 in the statement-expression are deallocated at the end of the
2334 expression. */
2336 tree
2337 expand_start_stmt_expr (int has_scope)
2339 tree t;
2341 /* Make the RTL_EXPR node temporary, not momentary,
2342 so that rtl_expr_chain doesn't become garbage. */
2343 t = make_node (RTL_EXPR);
2344 do_pending_stack_adjust ();
2345 if (has_scope)
2346 start_sequence_for_rtl_expr (t);
2347 else
2348 start_sequence ();
2349 NO_DEFER_POP;
2350 expr_stmts_for_value++;
2351 return t;
2354 /* Restore the previous state at the end of a statement that returns a value.
2355 Returns a tree node representing the statement's value and the
2356 insns to compute the value.
2358 The nodes of that expression have been freed by now, so we cannot use them.
2359 But we don't want to do that anyway; the expression has already been
2360 evaluated and now we just want to use the value.  So generate an RTL_EXPR
2361 with the proper type and RTL value.
2363 If the last substatement was not an expression,
2364 return something with type `void'. */
2366 tree
2367 expand_end_stmt_expr (tree t)
2369 OK_DEFER_POP;
2371 if (! last_expr_value || ! last_expr_type)
2373 last_expr_value = const0_rtx;
2374 last_expr_alt_rtl = NULL_RTX;
2375 last_expr_type = void_type_node;
2377 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2378 /* Remove any possible QUEUED. */
2379 last_expr_value = protect_from_queue (last_expr_value, 0);
2381 emit_queue ();
2383 TREE_TYPE (t) = last_expr_type;
2384 RTL_EXPR_RTL (t) = last_expr_value;
2385 RTL_EXPR_ALT_RTL (t) = last_expr_alt_rtl;
2386 RTL_EXPR_SEQUENCE (t) = get_insns ();
2388 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2390 end_sequence ();
2392 /* Don't consider deleting this expr or containing exprs at tree level. */
2393 TREE_SIDE_EFFECTS (t) = 1;
2394 /* Propagate volatility of the actual RTL expr. */
2395 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2397 clear_last_expr ();
2398 expr_stmts_for_value--;
2400 return t;
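/* A rough sketch of how a front end uses this pair for
   `({ f (); 42; })':

     t = expand_start_stmt_expr (1);
     ... expand the statements; the final `42' is recorded in
         last_expr_value and last_expr_type ...
     t = expand_end_stmt_expr (t);

   afterward RTL_EXPR_RTL (t) holds the rtx for 42 and
   RTL_EXPR_SEQUENCE (t) the insns that compute it.  */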
2403 /* Generate RTL for the start of an if-then. COND is the expression
2404 whose truth should be tested.
2406 If EXITFLAG is nonzero, this conditional is visible to
2407 `exit_something'. */
2409 void
2410 expand_start_cond (tree cond, int exitflag)
2412 struct nesting *thiscond = ALLOC_NESTING ();
2414 /* Make an entry on cond_stack for the cond we are entering. */
2416 thiscond->desc = COND_NESTING;
2417 thiscond->next = cond_stack;
2418 thiscond->all = nesting_stack;
2419 thiscond->depth = ++nesting_depth;
2420 thiscond->data.cond.next_label = gen_label_rtx ();
2421 /* Before we encounter an `else', we don't need a separate exit label
2422 unless there are supposed to be exit statements
2423 to exit this conditional. */
2424 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2425 thiscond->data.cond.endif_label = thiscond->exit_label;
2426 cond_stack = thiscond;
2427 nesting_stack = thiscond;
2429 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2432 /* Generate RTL between then-clause and the elseif-clause
2433 of an if-then-elseif-.... */
2435 void
2436 expand_start_elseif (tree cond)
2438 if (cond_stack->data.cond.endif_label == 0)
2439 cond_stack->data.cond.endif_label = gen_label_rtx ();
2440 emit_jump (cond_stack->data.cond.endif_label);
2441 emit_label (cond_stack->data.cond.next_label);
2442 cond_stack->data.cond.next_label = gen_label_rtx ();
2443 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2446 /* Generate RTL between the then-clause and the else-clause
2447 of an if-then-else. */
2449 void
2450 expand_start_else (void)
2452 if (cond_stack->data.cond.endif_label == 0)
2453 cond_stack->data.cond.endif_label = gen_label_rtx ();
2455 emit_jump (cond_stack->data.cond.endif_label);
2456 emit_label (cond_stack->data.cond.next_label);
2457 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2460 /* After calling expand_start_else, turn this "else" into an "else if"
2461 by providing another condition. */
2463 void
2464 expand_elseif (tree cond)
2466 cond_stack->data.cond.next_label = gen_label_rtx ();
2467 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2470 /* Generate RTL for the end of an if-then.
2471 Pop the record for it off of cond_stack. */
2473 void
2474 expand_end_cond (void)
2476 struct nesting *thiscond = cond_stack;
2478 do_pending_stack_adjust ();
2479 if (thiscond->data.cond.next_label)
2480 emit_label (thiscond->data.cond.next_label);
2481 if (thiscond->data.cond.endif_label)
2482 emit_label (thiscond->data.cond.endif_label);
2484 POPSTACK (cond_stack);
2485 clear_last_expr ();
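/* A rough sketch of the call sequence a front end makes for
   `if (a) s1; else if (b) s2; else s3;':

     expand_start_cond (a, 0);
     ... expand s1 ...
     expand_start_elseif (b);
     ... expand s2 ...
     expand_start_else ();
     ... expand s3 ...
     expand_end_cond ();  */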
2488 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2489 loop should be exited by `exit_something'. This is a loop for which
2490 `expand_continue' will jump to the top of the loop.
2492 Make an entry on loop_stack to record the labels associated with
2493 this loop. */
2495 struct nesting *
2496 expand_start_loop (int exit_flag)
2498 struct nesting *thisloop = ALLOC_NESTING ();
2500 /* Make an entry on loop_stack for the loop we are entering. */
2502 thisloop->desc = LOOP_NESTING;
2503 thisloop->next = loop_stack;
2504 thisloop->all = nesting_stack;
2505 thisloop->depth = ++nesting_depth;
2506 thisloop->data.loop.start_label = gen_label_rtx ();
2507 thisloop->data.loop.end_label = gen_label_rtx ();
2508 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2509 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2510 loop_stack = thisloop;
2511 nesting_stack = thisloop;
2513 do_pending_stack_adjust ();
2514 emit_queue ();
2515 emit_note (NOTE_INSN_LOOP_BEG);
2516 emit_label (thisloop->data.loop.start_label);
2518 return thisloop;
2521 /* Like expand_start_loop but for a loop where the continuation point
2522 (for expand_continue_loop) will be specified explicitly. */
2524 struct nesting *
2525 expand_start_loop_continue_elsewhere (int exit_flag)
2527 struct nesting *thisloop = expand_start_loop (exit_flag);
2528 loop_stack->data.loop.continue_label = gen_label_rtx ();
2529 return thisloop;
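/* A rough sketch of how a C front end might expand
   `for (init; cond; incr) body' with these hooks:

     ... expand init ...
     expand_start_loop_continue_elsewhere (1);
     expand_exit_loop_top_cond (0, cond);
     ... expand body ...
     expand_loop_continue_here ();
     ... expand incr ...
     expand_end_loop ();  */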
2532 /* Begin a null, aka do { } while (0) "loop". But since the contents
2533 of said loop can still contain a break, we must frob the loop nest. */
2535 struct nesting *
2536 expand_start_null_loop (void)
2538 struct nesting *thisloop = ALLOC_NESTING ();
2540 /* Make an entry on loop_stack for the loop we are entering. */
2542 thisloop->desc = LOOP_NESTING;
2543 thisloop->next = loop_stack;
2544 thisloop->all = nesting_stack;
2545 thisloop->depth = ++nesting_depth;
2546 thisloop->data.loop.start_label = emit_note (NOTE_INSN_DELETED);
2547 thisloop->data.loop.end_label = gen_label_rtx ();
2548 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2549 thisloop->exit_label = thisloop->data.loop.end_label;
2550 loop_stack = thisloop;
2551 nesting_stack = thisloop;
2553 return thisloop;
2556 /* Specify the continuation point for a loop started with
2557 expand_start_loop_continue_elsewhere.
2558 Use this at the point in the code to which a continue statement
2559 should jump. */
2561 void
2562 expand_loop_continue_here (void)
2564 do_pending_stack_adjust ();
2565 emit_note (NOTE_INSN_LOOP_CONT);
2566 emit_label (loop_stack->data.loop.continue_label);
2569 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2570 Pop the block off of loop_stack. */
2572 void
2573 expand_end_loop (void)
2575 rtx start_label = loop_stack->data.loop.start_label;
2576 rtx etc_note;
2577 int eh_regions, debug_blocks;
2578 bool empty_test;
2580 /* Mark the continue-point at the top of the loop if none elsewhere. */
2581 if (start_label == loop_stack->data.loop.continue_label)
2582 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2584 do_pending_stack_adjust ();
2586 /* If the loop starts with a loop exit, roll that to the end where
2587 it will optimize together with the jump back.
2589 If the loop presently looks like this (in pseudo-C):
2591 LOOP_BEG
2592 start_label:
2593 if (test) goto end_label;
2594 LOOP_END_TOP_COND
2595 body;
2596 goto start_label;
2597 end_label:
2599 transform it to look like:
2601 LOOP_BEG
2602 goto start_label;
2603 top_label:
2604 body;
2605 start_label:
2606 if (test) goto end_label;
2607 goto top_label;
2608 end_label:
2610 We rely on the presence of NOTE_INSN_LOOP_END_TOP_COND to mark
2611 the end of the entry conditional. Without this, our lexical scan
2612 can't tell the difference between an entry conditional and a
2613 body conditional that exits the loop. Mistaking the two means
2614 that we can misplace the NOTE_INSN_LOOP_CONT note, which can
2615 screw up loop unrolling.
2617 Things will be oh so much better when loop optimization is done
2618 off of a proper control flow graph... */
2620 /* Scan insns from the top of the loop looking for the END_TOP_COND note. */
2622 empty_test = true;
2623 eh_regions = debug_blocks = 0;
2624 for (etc_note = start_label; etc_note ; etc_note = NEXT_INSN (etc_note))
2625 if (GET_CODE (etc_note) == NOTE)
2627 if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_END_TOP_COND)
2628 break;
2630 /* We must not walk into a nested loop. */
2631 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_BEG)
2633 etc_note = NULL_RTX;
2634 break;
2637 /* At the same time, scan for EH region notes, as we don't want
2638 to scrog region nesting. This shouldn't happen, but... */
2639 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_BEG)
2640 eh_regions++;
2641 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_END)
2643 if (--eh_regions < 0)
2644 /* We've come to the end of an EH region, but never saw the
2645 beginning of that region. That means that an EH region
2646 begins before the top of the loop, and ends in the middle
2647 of it. The existence of such a situation violates a basic
2648 assumption in this code, since that would imply that even
2649 when EH_REGIONS is zero, we might move code out of an
2650 exception region. */
2651 abort ();
2654 /* Likewise for debug scopes. In this case we'll either (1) move
2655 all of the notes if they are properly nested or (2) leave the
2656 notes alone and only rotate the loop at high optimization
2657 levels when we expect to scrog debug info. */
2658 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_BEG)
2659 debug_blocks++;
2660 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_END)
2661 debug_blocks--;
2663 else if (INSN_P (etc_note))
2664 empty_test = false;
2666 if (etc_note
2667 && optimize
2668 && ! empty_test
2669 && eh_regions == 0
2670 && (debug_blocks == 0 || optimize >= 2)
2671 && NEXT_INSN (etc_note) != NULL_RTX
2672 && ! any_condjump_p (get_last_insn ()))
2674 /* We found one. Move everything from START to ETC to the end
2675 of the loop, and add a jump from the top of the loop. */
2676 rtx top_label = gen_label_rtx ();
2677 rtx start_move = start_label;
2679 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2680 then we want to move this note also. */
2681 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2682 && NOTE_LINE_NUMBER (PREV_INSN (start_move)) == NOTE_INSN_LOOP_CONT)
2683 start_move = PREV_INSN (start_move);
2685 emit_label_before (top_label, start_move);
2687 /* Actually move the insns. If the debug scopes are nested, we
2688 can move everything at once. Otherwise we have to move them
2689 one by one and squeeze out the block notes. */
2690 if (debug_blocks == 0)
2691 reorder_insns (start_move, etc_note, get_last_insn ());
2692 else
2694 rtx insn, next_insn;
2695 for (insn = start_move; insn; insn = next_insn)
2697 /* Figure out which insn comes after this one. We have
2698 to do this before we move INSN. */
2699 next_insn = (insn == etc_note ? NULL : NEXT_INSN (insn));
2701 if (GET_CODE (insn) == NOTE
2702 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2703 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2704 continue;
2706 reorder_insns (insn, insn, get_last_insn ());
2710 /* Add the jump from the top of the loop. */
2711 emit_jump_insn_before (gen_jump (start_label), top_label);
2712 emit_barrier_before (top_label);
2713 start_label = top_label;
2716 emit_jump (start_label);
2717 emit_note (NOTE_INSN_LOOP_END);
2718 emit_label (loop_stack->data.loop.end_label);
2720 POPSTACK (loop_stack);
2722 clear_last_expr ();
2725 /* Finish a null loop, aka do { } while (0). */
2727 void
2728 expand_end_null_loop (void)
2730 do_pending_stack_adjust ();
2731 emit_label (loop_stack->data.loop.end_label);
2733 POPSTACK (loop_stack);
2735 clear_last_expr ();
2738 /* Generate a jump to the current loop's continue-point.
2739 This is usually the top of the loop, but may be specified
2740 explicitly elsewhere. If not currently inside a loop,
2741 return 0 and do nothing; caller will print an error message. */
2743 int
2744 expand_continue_loop (struct nesting *whichloop)
2746 /* Emit information for branch prediction. */
2747 rtx note;
2749 if (flag_guess_branch_prob)
2751 note = emit_note (NOTE_INSN_PREDICTION);
2752 NOTE_PREDICTION (note) = NOTE_PREDICT (PRED_CONTINUE, IS_TAKEN);
2754 clear_last_expr ();
2755 if (whichloop == 0)
2756 whichloop = loop_stack;
2757 if (whichloop == 0)
2758 return 0;
2759 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2760 NULL_RTX);
2761 return 1;
2764 /* Generate a jump to exit the current loop. If not currently inside a loop,
2765 return 0 and do nothing; caller will print an error message. */
2767 int
2768 expand_exit_loop (struct nesting *whichloop)
2770 clear_last_expr ();
2771 if (whichloop == 0)
2772 whichloop = loop_stack;
2773 if (whichloop == 0)
2774 return 0;
2775 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2776 return 1;
2779 /* Generate a conditional jump to exit the current loop if COND
2780 evaluates to zero. If not currently inside a loop,
2781 return 0 and do nothing; caller will print an error message. */
2783 int
2784 expand_exit_loop_if_false (struct nesting *whichloop, tree cond)
2786 rtx label;
2787 clear_last_expr ();
2789 if (whichloop == 0)
2790 whichloop = loop_stack;
2791 if (whichloop == 0)
2792 return 0;
2794 if (integer_nonzerop (cond))
2795 return 1;
2796 if (integer_zerop (cond))
2797 return expand_exit_loop (whichloop);
2799 /* Check if we definitely won't need a fixup. */
2800 if (whichloop == nesting_stack)
2802 jumpifnot (cond, whichloop->data.loop.end_label);
2803 return 1;
2806 /* In order to handle fixups, we actually create a conditional jump
2807 around an unconditional branch to exit the loop. If fixups are
2808 necessary, they go before the unconditional branch. */
2810 label = gen_label_rtx ();
2811 jumpif (cond, label);
2812 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2813 NULL_RTX);
2814 emit_label (label);
2816 return 1;
2819 /* Like expand_exit_loop_if_false except also emit a note marking
2820 the end of the conditional. Should only be used immediately
2821 after expand_loop_start. */
2823 int
2824 expand_exit_loop_top_cond (struct nesting *whichloop, tree cond)
2826 if (! expand_exit_loop_if_false (whichloop, cond))
2827 return 0;
2829 emit_note (NOTE_INSN_LOOP_END_TOP_COND);
2830 return 1;
2833 /* Return nonzero if we should preserve sub-expressions as separate
2834 pseudos. We never do so if we aren't optimizing. We always do so
2835 if -fexpensive-optimizations.
2837 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2838 the loop may still be a small one. */
2840 int
2841 preserve_subexpressions_p (void)
2843 rtx insn;
2845 if (flag_expensive_optimizations)
2846 return 1;
2848 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2849 return 0;
2851 insn = get_last_insn_anywhere ();
2853 return (insn
2854 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2855 < n_non_fixed_regs * 3));
2859 /* Generate a jump to exit the current loop, conditional, binding contour
2860 or case statement. Not all such constructs are visible to this function,
2861 only those started with EXIT_FLAG nonzero. Individual languages use
2862 the EXIT_FLAG parameter to control which kinds of constructs you can
2863 exit this way.
2865 If not currently inside anything that can be exited,
2866 return 0 and do nothing; caller will print an error message. */
2868 int
2869 expand_exit_something (void)
2871 struct nesting *n;
2872 clear_last_expr ();
2873 for (n = nesting_stack; n; n = n->all)
2874 if (n->exit_label != 0)
2876 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2877 return 1;
2880 return 0;
2883 /* Generate RTL to return from the current function, with no value.
2884 (That is, we do not do anything about returning any value.) */
2886 void
2887 expand_null_return (void)
2889 rtx last_insn;
2891 last_insn = get_last_insn ();
2893 /* If this function was declared to return a value, but we
2894 didn't, clobber the return registers so that they are not
2895 propagated live to the rest of the function. */
2896 clobber_return_register ();
2898 expand_null_return_1 (last_insn);
2901 /* Generate RTL to return directly from the current function.
2902 (That is, we bypass any return value.) */
2904 void
2905 expand_naked_return (void)
2907 rtx last_insn, end_label;
2909 last_insn = get_last_insn ();
2910 end_label = naked_return_label;
2912 clear_pending_stack_adjust ();
2913 do_pending_stack_adjust ();
2914 clear_last_expr ();
2916 if (end_label == 0)
2917 end_label = naked_return_label = gen_label_rtx ();
2918 expand_goto_internal (NULL_TREE, end_label, last_insn);
2921 /* Try to guess whether the value of return means error code. */
2922 static enum br_predictor
2923 return_prediction (rtx val)
2925 /* Different heuristics for pointers and scalars. */
2926 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2928 /* NULL is usually not returned. */
2929 if (val == const0_rtx)
2930 return PRED_NULL_RETURN;
2932 else
2934 /* Negative return values are often used to indicate
2935 errors. */
2936 if (GET_CODE (val) == CONST_INT
2937 && INTVAL (val) < 0)
2938 return PRED_NEGATIVE_RETURN;
2939 /* Constant return values are also usually errors;
2940 zero/one often mean booleans, so exclude them from the
2941 heuristics.  */
2942 if (CONSTANT_P (val)
2943 && (val != const0_rtx && val != const1_rtx))
2944 return PRED_CONST_RETURN;
2946 return PRED_NO_PREDICTION;
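/* Examples: `return NULL;' from a pointer-valued function yields
   PRED_NULL_RETURN; `return -1;' yields PRED_NEGATIVE_RETURN; and
   `return 0;' from an integer function yields no prediction, since
   0 and 1 commonly encode booleans.  */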
2950 /* If the current function returns values in the most significant part
2951 of a register, shift return value VAL appropriately. The mode of
2952 the function's return type is known not to be BLKmode. */
2954 static rtx
2955 shift_return_value (rtx val)
2957 tree type;
2959 type = TREE_TYPE (DECL_RESULT (current_function_decl));
2960 if (targetm.calls.return_in_msb (type))
2962 rtx target;
2963 HOST_WIDE_INT shift;
2965 target = DECL_RTL (DECL_RESULT (current_function_decl));
2966 shift = (GET_MODE_BITSIZE (GET_MODE (target))
2967 - BITS_PER_UNIT * int_size_in_bytes (type));
2968 if (shift > 0)
2969 val = expand_binop (GET_MODE (target), ashl_optab,
2970 gen_lowpart (GET_MODE (target), val),
2971 GEN_INT (shift), target, 1, OPTAB_WIDEN);
2973 return val;
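/* A worked example, assuming a 32-bit return register on a target
   whose return_in_msb hook is true: a 3-byte value gives
   shift = 32 - 24 = 8, so VAL is shifted left by 8 bits to sit at
   the most significant end.  */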
2977 /* Generate RTL to return from the current function, with value VAL. */
2979 static void
2980 expand_value_return (rtx val)
2982 rtx last_insn;
2983 rtx return_reg;
2984 enum br_predictor pred;
2986 if (flag_guess_branch_prob
2987 && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
2989 /* Emit information for branch prediction. */
2990 rtx note;
2992 note = emit_note (NOTE_INSN_PREDICTION);
2994 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
2998 last_insn = get_last_insn ();
2999 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
3001 /* Copy the value to the return location
3002 unless it's already there. */
3004 if (return_reg != val)
3006 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
3007 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
3009 int unsignedp = TREE_UNSIGNED (type);
3010 enum machine_mode old_mode
3011 = DECL_MODE (DECL_RESULT (current_function_decl));
3012 enum machine_mode mode
3013 = promote_mode (type, old_mode, &unsignedp, 1);
3015 if (mode != old_mode)
3016 val = convert_modes (mode, old_mode, val, unsignedp);
3018 if (GET_CODE (return_reg) == PARALLEL)
3019 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3020 else
3021 emit_move_insn (return_reg, val);
3024 expand_null_return_1 (last_insn);
3027 /* Output a return with no value. If LAST_INSN is nonzero,
3028 pretend that the return takes place after LAST_INSN. */
3030 static void
3031 expand_null_return_1 (rtx last_insn)
3033 rtx end_label = cleanup_label ? cleanup_label : return_label;
3035 clear_pending_stack_adjust ();
3036 do_pending_stack_adjust ();
3037 clear_last_expr ();
3039 if (end_label == 0)
3040 end_label = return_label = gen_label_rtx ();
3041 expand_goto_internal (NULL_TREE, end_label, last_insn);
3044 /* Generate RTL to evaluate the expression RETVAL and return it
3045 from the current function. */
3047 void
3048 expand_return (tree retval)
3050 /* If there are any cleanups to be performed, then they will
3051 be inserted following LAST_INSN. It is desirable
3052 that the last_insn, for such purposes, should be the
3053 last insn before computing the return value. Otherwise, cleanups
3054 which call functions can clobber the return value. */
3055 /* ??? rms: I think that is erroneous, because in C++ it would
3056 run destructors on variables that might be used in the subsequent
3057 computation of the return value. */
3058 rtx last_insn = 0;
3059 rtx result_rtl;
3060 rtx val = 0;
3061 tree retval_rhs;
3063 /* If function wants no value, give it none. */
3064 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3066 expand_expr (retval, NULL_RTX, VOIDmode, 0);
3067 emit_queue ();
3068 expand_null_return ();
3069 return;
3072 if (retval == error_mark_node)
3074 /* Treat this like a return of no value from a function that
3075 returns a value. */
3076 expand_null_return ();
3077 return;
3079 else if (TREE_CODE (retval) == RESULT_DECL)
3080 retval_rhs = retval;
3081 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
3082 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3083 retval_rhs = TREE_OPERAND (retval, 1);
3084 else if (VOID_TYPE_P (TREE_TYPE (retval)))
3085 /* Recognize tail-recursive call to void function. */
3086 retval_rhs = retval;
3087 else
3088 retval_rhs = NULL_TREE;
3090 last_insn = get_last_insn ();
3092 /* Distribute return down conditional expr if either of the sides
3093 may involve tail recursion (see test below). This enhances the number
3094 of tail recursions we see. Don't do this always since it can produce
3095 sub-optimal code in some cases and we distribute assignments into
3096 conditional expressions when it would help. */
3098 if (optimize && retval_rhs != 0
3099 && frame_offset == 0
3100 && TREE_CODE (retval_rhs) == COND_EXPR
3101 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
3102 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
3104 rtx label = gen_label_rtx ();
3105 tree expr;
3107 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
3108 start_cleanup_deferral ();
3109 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3110 DECL_RESULT (current_function_decl),
3111 TREE_OPERAND (retval_rhs, 1));
3112 TREE_SIDE_EFFECTS (expr) = 1;
3113 expand_return (expr);
3114 emit_label (label);
3116 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3117 DECL_RESULT (current_function_decl),
3118 TREE_OPERAND (retval_rhs, 2));
3119 TREE_SIDE_EFFECTS (expr) = 1;
3120 expand_return (expr);
3121 end_cleanup_deferral ();
3122 return;
3125 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3127 /* If the result is an aggregate that is being returned in one (or more)
3128 registers, load the registers here. The compiler currently can't handle
3129 copying a BLKmode value into registers. We could put this code in a
3130 more general area (for use by everyone instead of just function
3131 call/return), but until this feature is generally usable it is kept here
3132 (and in expand_call). The value must go into a pseudo in case there
3133 are cleanups that will clobber the real return register. */
3135 if (retval_rhs != 0
3136 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3137 && GET_CODE (result_rtl) == REG)
3139 int i;
3140 unsigned HOST_WIDE_INT bitpos, xbitpos;
3141 unsigned HOST_WIDE_INT padding_correction = 0;
3142 unsigned HOST_WIDE_INT bytes
3143 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3144 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3145 unsigned int bitsize
3146 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3147 rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
3148 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3149 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3150 enum machine_mode tmpmode, result_reg_mode;
3152 if (bytes == 0)
3154 expand_null_return ();
3155 return;
3158 /* If the structure doesn't take up a whole number of words, see
3159 whether the register value should be padded on the left or on
3160 the right. Set PADDING_CORRECTION to the number of padding
3161 bits needed on the left side.
3163 In most ABIs, the structure will be returned at the least significant end
3164 the register, which translates to right padding on little-endian
3165 targets and left padding on big-endian targets. The opposite
3166 holds if the structure is returned at the most significant
3167 end of the register. */
3168 if (bytes % UNITS_PER_WORD != 0
3169 && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
3170 ? !BYTES_BIG_ENDIAN
3171 : BYTES_BIG_ENDIAN))
3172 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3173 * BITS_PER_UNIT));
3175 /* Copy the structure BITSIZE bits at a time. */
3176 for (bitpos = 0, xbitpos = padding_correction;
3177 bitpos < bytes * BITS_PER_UNIT;
3178 bitpos += bitsize, xbitpos += bitsize)
3180 /* We need a new destination pseudo each time xbitpos is
3181 on a word boundary and when xbitpos == padding_correction
3182 (the first time through). */
3183 if (xbitpos % BITS_PER_WORD == 0
3184 || xbitpos == padding_correction)
3186 /* Generate an appropriate register. */
3187 dst = gen_reg_rtx (word_mode);
3188 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3190 /* Clear the destination before we move anything into it. */
3191 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
3194 /* We need a new source operand each time bitpos is on a word
3195 boundary. */
3196 if (bitpos % BITS_PER_WORD == 0)
3197 src = operand_subword_force (result_val,
3198 bitpos / BITS_PER_WORD,
3199 BLKmode);
3201 /* Use bitpos for the source extraction (left justified) and
3202 xbitpos for the destination store (right justified). */
3203 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3204 extract_bit_field (src, bitsize,
3205 bitpos % BITS_PER_WORD, 1,
3206 NULL_RTX, word_mode, word_mode,
3207 BITS_PER_WORD),
3208 BITS_PER_WORD);
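/* A worked example, assuming 32-bit words: a 6-byte structure gives
   bytes = 6 and n_regs = 2; if left padding is required,
   padding_correction = 32 - 16 = 16, so the 48 data bits are read
   starting at bit 0 of the source but stored starting 16 bits into
   the destination pseudos.  */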
3211 tmpmode = GET_MODE (result_rtl);
3212 if (tmpmode == BLKmode)
3214 /* Find the smallest integer mode large enough to hold the
3215 entire structure and use that mode instead of BLKmode
3216 on the USE insn for the return register. */
3217 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3218 tmpmode != VOIDmode;
3219 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3220 /* Have we found a large enough mode? */
3221 if (GET_MODE_SIZE (tmpmode) >= bytes)
3222 break;
3224 /* No suitable mode found. */
3225 if (tmpmode == VOIDmode)
3226 abort ();
3228 PUT_MODE (result_rtl, tmpmode);
3231 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3232 result_reg_mode = word_mode;
3233 else
3234 result_reg_mode = tmpmode;
3235 result_reg = gen_reg_rtx (result_reg_mode);
3237 emit_queue ();
3238 for (i = 0; i < n_regs; i++)
3239 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3240 result_pseudos[i]);
3242 if (tmpmode != result_reg_mode)
3243 result_reg = gen_lowpart (tmpmode, result_reg);
3245 expand_value_return (result_reg);
3247 else if (retval_rhs != 0
3248 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3249 && (GET_CODE (result_rtl) == REG
3250 || (GET_CODE (result_rtl) == PARALLEL)))
3252 /* Calculate the return value into a temporary (usually a pseudo
3253 reg). */
3254 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3255 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3257 val = assign_temp (nt, 0, 0, 1);
3258 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3259 val = force_not_mem (val);
3260 emit_queue ();
3261 /* Return the calculated value, doing cleanups first. */
3262 expand_value_return (shift_return_value (val));
3264 else
3266 /* No cleanups or no hard reg used;
3267 calculate value into hard return reg. */
3268 expand_expr (retval, const0_rtx, VOIDmode, 0);
3269 emit_queue ();
3270 expand_value_return (result_rtl);
3274 /* Attempt to optimize a potential tail recursion call into a goto.
3275 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3276 where to place the jump to the tail recursion label.
3278 Return TRUE if the call was optimized into a goto. */
3280 int
3281 optimize_tail_recursion (tree arguments, rtx last_insn)
3283 /* Finish checking validity, and if valid emit code to set the
3284 argument variables for the new call. */
3285 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3287 if (tail_recursion_label == 0)
3289 tail_recursion_label = gen_label_rtx ();
3290 emit_label_after (tail_recursion_label,
3291 tail_recursion_reentry);
3293 emit_queue ();
3294 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3295 emit_barrier ();
3296 return 1;
3298 return 0;
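/* Sketch: for a self-call such as `return f (n - 1);' inside f,
   the formal n is overwritten with n - 1 and control jumps back to
   tail_recursion_label near the start of the function, replacing a
   real call with a goto.  */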
3301 /* Emit code to alter this function's formal parms for a tail-recursive call.
3302 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3303 FORMALS is the chain of decls of formals.
3304 Return 1 if this can be done;
3305 otherwise return 0 and do not emit any code. */
3307 static int
3308 tail_recursion_args (tree actuals, tree formals)
3310 tree a = actuals, f = formals;
3311 int i;
3312 rtx *argvec;
3314 /* Check that number and types of actuals are compatible
3315 with the formals. This is not always true in valid C code.
3316 Also check that no formal needs to be addressable
3317 and that all formals are scalars. */
3319 /* Also count the args. */
3321 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3323 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3324 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3325 return 0;
3326 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3327 return 0;
3329 if (a != 0 || f != 0)
3330 return 0;
3332 /* Compute all the actuals. */
3334 argvec = alloca (i * sizeof (rtx));
3336 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3337 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3339 /* Find which actual values refer to current values of previous formals.
3340 Copy each of them now, before any formal is changed. */
3342 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3344 int copy = 0;
3345 int j;
3346 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3347 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3349 copy = 1;
3350 break;
3352 if (copy)
3353 argvec[i] = copy_to_reg (argvec[i]);
3356 /* Insert the pre-call sequence point. This is important in cases
3357 where the actual values post-modify the formals: we want the final
3358 values of the formals to be the ones that we assign below, not the
3359 result of the post-modification. */
3360 emit_queue ();
3362 /* Store the values of the actuals into the formals. */
3364 for (f = formals, a = actuals, i = 0; f;
3365 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3367 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3368 emit_move_insn (DECL_RTL (f), argvec[i]);
3369 else
3371 rtx tmp = argvec[i];
3372 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a)));
3373 promote_mode (TREE_TYPE (TREE_VALUE (a)), GET_MODE (tmp),
3374 &unsignedp, 0);
3375 if (DECL_MODE (f) != GET_MODE (DECL_RTL (f)))
3377 tmp = gen_reg_rtx (DECL_MODE (f));
3378 convert_move (tmp, argvec[i], unsignedp);
3380 convert_move (DECL_RTL (f), tmp, unsignedp);
3384 free_temp_slots ();
3385 return 1;
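/* An example of why the copies above matter: expanding the tail
   call f (b, a) inside f (a, b) first stores b's value into a, so
   the actual `a' used for the second formal must be copied to a
   fresh register beforehand; otherwise the store into b would read
   the already-clobbered a.  */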
3388 /* Generate the RTL code for entering a binding contour.
3389 The variables are declared one by one, by calls to `expand_decl'.
3391 FLAGS is a bitwise or of the following flags:
3393 1 - Nonzero if this construct should be visible to
3394 `exit_something'.
3396 2 - Nonzero if this contour does not require a
3397 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3398 language-independent code should set this flag because they
3399 will not create corresponding BLOCK nodes. (There should be
3400 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3401 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3402 when expand_end_bindings is called.
3404 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3405 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3406 note. */
3408 void
3409 expand_start_bindings_and_block (int flags, tree block)
3411 struct nesting *thisblock = ALLOC_NESTING ();
3412 rtx note;
3413 int exit_flag = ((flags & 1) != 0);
3414 int block_flag = ((flags & 2) == 0);
3416 /* If a BLOCK is supplied, then the caller should be requesting a
3417 NOTE_INSN_BLOCK_BEG note. */
3418 if (!block_flag && block)
3419 abort ();
3421 /* Create a note to mark the beginning of the block. */
3422 if (block_flag)
3424 note = emit_note (NOTE_INSN_BLOCK_BEG);
3425 NOTE_BLOCK (note) = block;
3427 else
3428 note = emit_note (NOTE_INSN_DELETED);
3430 /* Make an entry on block_stack for the block we are entering. */
3432 thisblock->desc = BLOCK_NESTING;
3433 thisblock->next = block_stack;
3434 thisblock->all = nesting_stack;
3435 thisblock->depth = ++nesting_depth;
3436 thisblock->data.block.stack_level = 0;
3437 thisblock->data.block.cleanups = 0;
3438 thisblock->data.block.exception_region = 0;
3439 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3441 thisblock->data.block.conditional_code = 0;
3442 thisblock->data.block.last_unconditional_cleanup = note;
3443 /* When we insert instructions after the last unconditional cleanup,
3444 we don't adjust last_insn. That means that a later add_insn will
3445 clobber the instructions we've just added. The easiest way to
3446 fix this is to just insert another instruction here, so that the
3447 instructions inserted after the last unconditional cleanup are
3448 never the last instruction. */
3449 emit_note (NOTE_INSN_DELETED);
3451 if (block_stack
3452 && !(block_stack->data.block.cleanups == NULL_TREE
3453 && block_stack->data.block.outer_cleanups == NULL_TREE))
3454 thisblock->data.block.outer_cleanups
3455 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3456 block_stack->data.block.outer_cleanups);
3457 else
3458 thisblock->data.block.outer_cleanups = 0;
3459 thisblock->data.block.label_chain = 0;
3460 thisblock->data.block.innermost_stack_block = stack_block_stack;
3461 thisblock->data.block.first_insn = note;
3462 thisblock->data.block.block_start_count = ++current_block_start_count;
3463 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3464 block_stack = thisblock;
3465 nesting_stack = thisblock;
3467 /* Make a new level for allocating stack slots. */
3468 push_temp_slots ();
3471 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3472 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3473 expand_expr are made. After we end the region, we know that all
3474 space for all temporaries that were created by TARGET_EXPRs will be
3475 destroyed and their space freed for reuse. */
3477 void
3478 expand_start_target_temps (void)
3480 /* This is so that even if the result is preserved, the space
3481 allocated will be freed, as we know that it is no longer in use. */
3482 push_temp_slots ();
3484 /* Start a new binding layer that will keep track of all cleanup
3485 actions to be performed. */
3486 expand_start_bindings (2);
3488 target_temp_slot_level = temp_slot_level;
3491 void
3492 expand_end_target_temps (void)
3494 expand_end_bindings (NULL_TREE, 0, 0);
3496 /* This is so that even if the result is preserved, the space
3497 allocated will be freed, as we know that it is no longer in use. */
3498 pop_temp_slots ();
3501 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
3502 in question represents the outermost pair of curly braces (i.e. the "body
3503 block") of a function or method.
3505 For any BLOCK node representing a "body block" of a function or method, the
3506 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3507 represents the outermost (function) scope for the function or method (i.e.
3508 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3509 *that* node in turn will point to the relevant FUNCTION_DECL node. */
3511 int
3512 is_body_block (tree stmt)
3514 if (lang_hooks.no_body_blocks)
3515 return 0;
3517 if (TREE_CODE (stmt) == BLOCK)
3519 tree parent = BLOCK_SUPERCONTEXT (stmt);
3521 if (parent && TREE_CODE (parent) == BLOCK)
3523 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3525 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3526 return 1;
3530 return 0;
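/* Illustration: for `int f (int x) { ... }' the braces yield the
   body BLOCK; its BLOCK_SUPERCONTEXT is the outermost scope BLOCK
   holding the parameter x, whose BLOCK_SUPERCONTEXT in turn is the
   FUNCTION_DECL for f.  That is exactly the shape tested here.  */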
3533 /* True if we are currently emitting insns in an area of output code
3534 that is controlled by a conditional expression. This is used by
3535 the cleanup handling code to generate conditional cleanup actions. */
3537 int
3538 conditional_context (void)
3540 return block_stack && block_stack->data.block.conditional_code;
3543 /* Return an opaque pointer to the current nesting level, so frontend code
3544 can check its own sanity. */
3546 struct nesting *
3547 current_nesting_level (void)
3549 return cfun ? block_stack : 0;
3552 /* Emit a handler label for a nonlocal goto handler.
3553 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3555 static rtx
3556 expand_nl_handler_label (rtx slot, rtx before_insn)
3558 rtx insns;
3559 rtx handler_label = gen_label_rtx ();
3561 /* Don't let cleanup_cfg delete the handler. */
3562 LABEL_PRESERVE_P (handler_label) = 1;
3564 start_sequence ();
3565 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3566 insns = get_insns ();
3567 end_sequence ();
3568 emit_insn_before (insns, before_insn);
3570 emit_label (handler_label);
3572 return handler_label;
3575 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3576 handler. */
3577 static void
3578 expand_nl_goto_receiver (void)
3580 /* Clobber the FP when we get here, so we have to make sure it's
3581 marked as used by this function. */
3582 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
3584 /* Mark the static chain as clobbered here so life information
3585 doesn't get messed up for it. */
3586 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
3588 #ifdef HAVE_nonlocal_goto
3589 if (! HAVE_nonlocal_goto)
3590 #endif
3591 /* First adjust our frame pointer to its actual value. It was
3592 previously set to the start of the virtual area corresponding to
3593 the stacked variables when we branched here and now needs to be
3594 adjusted to the actual hardware fp value.
3596 Assignments to virtual registers are converted by
3597 instantiate_virtual_regs into the corresponding assignment
3598 to the underlying register (fp in this case) that makes
3599 the original assignment true.
3600 So the following insn will actually be
3601 decrementing fp by STARTING_FRAME_OFFSET. */
3602 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3604 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3605 if (fixed_regs[ARG_POINTER_REGNUM])
3607 #ifdef ELIMINABLE_REGS
3608 /* If the argument pointer can be eliminated in favor of the
3609 frame pointer, we don't need to restore it. We assume here
3610 that if such an elimination is present, it can always be used.
3611 This is the case on all known machines; if we don't make this
3612 assumption, we do unnecessary saving on many machines. */
3613 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3614 size_t i;
3616 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3617 if (elim_regs[i].from == ARG_POINTER_REGNUM
3618 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3619 break;
3621 if (i == ARRAY_SIZE (elim_regs))
3622 #endif
3624 /* Now restore our arg pointer from the address at which it
3625 was saved in our stack frame. */
3626 emit_move_insn (virtual_incoming_args_rtx,
3627 copy_to_reg (get_arg_pointer_save_area (cfun)));
3630 #endif
3632 #ifdef HAVE_nonlocal_goto_receiver
3633 if (HAVE_nonlocal_goto_receiver)
3634 emit_insn (gen_nonlocal_goto_receiver ());
3635 #endif
3637 /* @@@ This is a kludge. Not all machine descriptions define a blockage
3638 insn, but we must not allow the code we just generated to be reordered
3639 by scheduling. Specifically, the update of the frame pointer must
3640 happen immediately, not later. So emit an ASM_INPUT to act as blockage
3641 insn. */
3642 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
3645 /* Make handlers for nonlocal gotos taking place in the function calls in
3646 block THISBLOCK. */
3648 static void
3649 expand_nl_goto_receivers (struct nesting *thisblock)
3651 tree link;
3652 rtx afterward = gen_label_rtx ();
3653 rtx insns, slot;
3654 rtx label_list;
3655 int any_invalid;
3657 /* Record the handler address in the stack slot for that purpose,
3658 during this block, saving and restoring the outer value. */
3659 if (thisblock->next != 0)
3660 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3662 rtx save_receiver = gen_reg_rtx (Pmode);
3663 emit_move_insn (XEXP (slot, 0), save_receiver);
3665 start_sequence ();
3666 emit_move_insn (save_receiver, XEXP (slot, 0));
3667 insns = get_insns ();
3668 end_sequence ();
3669 emit_insn_before (insns, thisblock->data.block.first_insn);
3672 /* Jump around the handlers; they run only when specially invoked. */
3673 emit_jump (afterward);
3675 /* Make a separate handler for each label. */
3676 link = nonlocal_labels;
3677 slot = nonlocal_goto_handler_slots;
3678 label_list = NULL_RTX;
3679 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3680 /* Skip any labels we shouldn't be able to jump to from here;
3681 we generate one special handler for all of them below, which
3682 just emits a trap. */
3683 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3685 rtx lab;
3686 lab = expand_nl_handler_label (XEXP (slot, 0),
3687 thisblock->data.block.first_insn);
3688 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3690 expand_nl_goto_receiver ();
3692 /* Jump to the "real" nonlocal label. */
3693 expand_goto (TREE_VALUE (link));
3696 /* A second pass over all nonlocal labels; this time we handle those
3697 we should not be able to jump to at this point. */
3698 link = nonlocal_labels;
3699 slot = nonlocal_goto_handler_slots;
3700 any_invalid = 0;
3701 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3702 if (DECL_TOO_LATE (TREE_VALUE (link)))
3704 rtx lab;
3705 lab = expand_nl_handler_label (XEXP (slot, 0),
3706 thisblock->data.block.first_insn);
3707 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3708 any_invalid = 1;
3711 if (any_invalid)
3713 expand_nl_goto_receiver ();
3714 expand_builtin_trap ();
3717 nonlocal_goto_handler_labels = label_list;
3718 emit_label (afterward);
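/* The overall layout produced by this function is, schematically,

       goto AFTERWARD;
     HANDLER_1: <receiver prologue>  goto <nonlocal label 1>;
     HANDLER_2: <receiver prologue>  goto <nonlocal label 2>;
       ...
     INVALID:   <receiver prologue>  trap;    -- shared by too-late labels
     AFTERWARD:

   where each <receiver prologue> is the register-restoring code
   emitted by expand_nl_goto_receiver above.  */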
3721 /* Warn about any unused VARS (which may contain nodes other than
3722 VAR_DECLs, but such nodes are ignored). The nodes are connected
3723 via the TREE_CHAIN field. */
3725 void
3726 warn_about_unused_variables (tree vars)
3728 tree decl;
3730 if (warn_unused_variable)
3731 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3732 if (TREE_CODE (decl) == VAR_DECL
3733 && ! TREE_USED (decl)
3734 && ! DECL_IN_SYSTEM_HEADER (decl)
3735 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3736 warning ("%Junused variable '%D'", decl, decl);
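/* For example (a sketch; `g' is hypothetical),

     void g (void) { int n; }

   draws "unused variable `n'" when -Wunused-variable is given, while
   used, unnamed, system-header and DECL_ARTIFICIAL decls are all
   exempted by the conditions above.  */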
3739 /* Generate RTL code to terminate a binding contour.
3741 VARS is the chain of VAR_DECL nodes for the variables bound in this
3742 contour. There may actually be other nodes in this chain, but any
3743 nodes other than VAR_DECLS are ignored.
3745 MARK_ENDS is nonzero if we should put a note at the beginning
3746 and end of this binding contour.
3748 DONT_JUMP_IN is positive if it is not valid to jump into this contour,
3749 zero if we can jump into this contour only if it does not have a saved
3750 stack level, and negative if we are not to check for invalid use of
3751 labels (because the front end does that). */
3753 void
3754 expand_end_bindings (tree vars, int mark_ends, int dont_jump_in)
3756 struct nesting *thisblock = block_stack;
3758 /* If any of the variables in this scope were not used, warn the
3759 user. */
3760 warn_about_unused_variables (vars);
3762 if (thisblock->exit_label)
3764 do_pending_stack_adjust ();
3765 emit_label (thisblock->exit_label);
3768 /* If necessary, make handlers for nonlocal gotos taking
3769 place in the function calls in this block. */
3770 if (function_call_count != 0 && nonlocal_labels
3771 /* Make handler for outermost block
3772 if there were any nonlocal gotos to this function. */
3773 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3774 /* Make handler for inner block if it has something
3775 special to do when you jump out of it. */
3776 : (thisblock->data.block.cleanups != 0
3777 || thisblock->data.block.stack_level != 0)))
3778 expand_nl_goto_receivers (thisblock);
3780 /* Don't allow jumping into a block that has a stack level.
3781 Cleanups are allowed, though. */
3782 if (dont_jump_in > 0
3783 || (dont_jump_in == 0 && thisblock->data.block.stack_level != 0))
3785 struct label_chain *chain;
3787 /* Any labels in this block are no longer valid to go to.
3788 Mark them to cause an error message. */
3789 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3791 DECL_TOO_LATE (chain->label) = 1;
3792 /* If any goto without a fixup came to this label,
3793 that must be an error, because gotos without fixups
3794 come from outside all saved stack-levels. */
3795 if (TREE_ADDRESSABLE (chain->label))
3796 error ("%Jlabel '%D' used before containing binding contour",
3797 chain->label, chain->label);
3801 /* Restore stack level in effect before the block
3802 (only if variable-size objects allocated). */
3803 /* Perform any cleanups associated with the block. */
3805 if (thisblock->data.block.stack_level != 0
3806 || thisblock->data.block.cleanups != 0)
3808 int reachable;
3809 rtx insn;
3811 /* Don't let cleanups affect ({...}) constructs. */
3812 int old_expr_stmts_for_value = expr_stmts_for_value;
3813 rtx old_last_expr_value = last_expr_value;
3814 rtx old_last_expr_alt_rtl = last_expr_alt_rtl;
3815 tree old_last_expr_type = last_expr_type;
3816 expr_stmts_for_value = 0;
3818 /* Only clean up here if this point can actually be reached. */
3819 insn = get_last_insn ();
3820 if (GET_CODE (insn) == NOTE)
3821 insn = prev_nonnote_insn (insn);
3822 reachable = (! insn || GET_CODE (insn) != BARRIER);
3824 /* Do the cleanups. */
3825 expand_cleanups (thisblock->data.block.cleanups, 0, reachable);
3826 if (reachable)
3827 do_pending_stack_adjust ();
3829 expr_stmts_for_value = old_expr_stmts_for_value;
3830 last_expr_value = old_last_expr_value;
3831 last_expr_alt_rtl = old_last_expr_alt_rtl;
3832 last_expr_type = old_last_expr_type;
3834 /* Restore the stack level. */
3836 if (reachable && thisblock->data.block.stack_level != 0)
3838 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3839 thisblock->data.block.stack_level, NULL_RTX);
3840 if (nonlocal_goto_handler_slots != 0)
3841 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3842 NULL_RTX);
3845 /* Any gotos out of this block must also do these things.
3846 Also report any gotos with fixups that came to labels in this
3847 level. */
3848 fixup_gotos (thisblock,
3849 thisblock->data.block.stack_level,
3850 thisblock->data.block.cleanups,
3851 thisblock->data.block.first_insn,
3852 dont_jump_in);
3855 /* Mark the beginning and end of the scope if requested.
3856 We do this now, after running cleanups on the variables
3857 just going out of scope, so they are in scope for their cleanups. */
3859 if (mark_ends)
3861 rtx note = emit_note (NOTE_INSN_BLOCK_END);
3862 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3864 else
3865 /* Get rid of the beginning-mark if we don't make an end-mark. */
3866 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3868 /* Restore the temporary level of TARGET_EXPRs. */
3869 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3871 /* Restore block_stack level for containing block. */
3873 stack_block_stack = thisblock->data.block.innermost_stack_block;
3874 POPSTACK (block_stack);
3876 /* Pop the stack slot nesting and free any slots at this level. */
3877 pop_temp_slots ();
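/* This function pairs with expand_start_bindings.  As a sketch (the
   exact sequence varies by front end), a braced block is expanded as

     expand_start_bindings (0);
     ... expand_decl / expand_decl_init for each local ...
     ... expand each statement ...
     expand_end_bindings (<chain of VAR_DECLs>, 1, 0);

   so cleanups, stack restoration and the scope notes all happen at
   the matching expand_end_bindings.  */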
3880 /* Generate code to save the stack pointer at the start of the current block
3881 and set up to restore it on exit. */
3883 void
3884 save_stack_pointer (void)
3886 struct nesting *thisblock = block_stack;
3888 if (thisblock->data.block.stack_level == 0)
3890 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3891 &thisblock->data.block.stack_level,
3892 thisblock->data.block.first_insn);
3893 stack_block_stack = thisblock;
3897 /* Generate RTL for the automatic variable declaration DECL.
3898 (Other kinds of declarations are simply ignored if seen here.) */
3900 void
3901 expand_decl (tree decl)
3903 tree type;
3905 type = TREE_TYPE (decl);
3907 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3908 type in case this node is used in a reference. */
3909 if (TREE_CODE (decl) == CONST_DECL)
3911 DECL_MODE (decl) = TYPE_MODE (type);
3912 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3913 DECL_SIZE (decl) = TYPE_SIZE (type);
3914 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3915 return;
3918 /* Otherwise, only automatic variables need any expansion done. Static and
3919 external variables, and external functions, will be handled by
3920 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3921 nothing. PARM_DECLs are handled in `assign_parms'. */
3922 if (TREE_CODE (decl) != VAR_DECL)
3923 return;
3925 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3926 return;
3928 /* Create the RTL representation for the variable. */
3930 if (type == error_mark_node)
3931 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3933 else if (DECL_SIZE (decl) == 0)
3934 /* Variable with incomplete type. */
3936 rtx x;
3937 if (DECL_INITIAL (decl) == 0)
3938 /* Error message was already done; now avoid a crash. */
3939 x = gen_rtx_MEM (BLKmode, const0_rtx);
3940 else
3941 /* An initializer is going to decide the size of this array.
3942 Until we know the size, represent its address with a reg. */
3943 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3945 set_mem_attributes (x, decl, 1);
3946 SET_DECL_RTL (decl, x);
3948 else if (DECL_MODE (decl) != BLKmode
3949 /* If -ffloat-store, don't put explicit float vars
3950 into regs. */
3951 && !(flag_float_store
3952 && TREE_CODE (type) == REAL_TYPE)
3953 && ! TREE_THIS_VOLATILE (decl)
3954 && ! DECL_NONLOCAL (decl)
3955 && (DECL_REGISTER (decl) || DECL_ARTIFICIAL (decl) || optimize))
3957 /* Automatic variable that can go in a register. */
3958 int unsignedp = TREE_UNSIGNED (type);
3959 enum machine_mode reg_mode
3960 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3962 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3964 if (!DECL_ARTIFICIAL (decl))
3965 mark_user_reg (DECL_RTL (decl));
3967 if (POINTER_TYPE_P (type))
3968 mark_reg_pointer (DECL_RTL (decl),
3969 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3971 maybe_set_unchanging (DECL_RTL (decl), decl);
3973 /* If something wants our address, try to use ADDRESSOF. */
3974 if (TREE_ADDRESSABLE (decl))
3975 put_var_into_stack (decl, /*rescan=*/false);
3978 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3979 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3980 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3981 STACK_CHECK_MAX_VAR_SIZE)))
3983 /* Variable of fixed size that goes on the stack. */
3984 rtx oldaddr = 0;
3985 rtx addr;
3986 rtx x;
3988 /* If we previously made RTL for this decl, it must be an array
3989 whose size was determined by the initializer.
3990 The old address was a register; set that register now
3991 to the proper address. */
3992 if (DECL_RTL_SET_P (decl))
3994 if (GET_CODE (DECL_RTL (decl)) != MEM
3995 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3996 abort ();
3997 oldaddr = XEXP (DECL_RTL (decl), 0);
4000 /* Set alignment we actually gave this decl. */
4001 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
4002 : GET_MODE_BITSIZE (DECL_MODE (decl)));
4003 DECL_USER_ALIGN (decl) = 0;
4005 x = assign_temp (decl, 1, 1, 1);
4006 set_mem_attributes (x, decl, 1);
4007 SET_DECL_RTL (decl, x);
4009 if (oldaddr)
4011 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
4012 if (addr != oldaddr)
4013 emit_move_insn (oldaddr, addr);
4016 else
4017 /* Dynamic-size object: must push space on the stack. */
4019 rtx address, size, x;
4021 /* Record the stack pointer on entry to block, if we have
4022 not already done so. */
4023 do_pending_stack_adjust ();
4024 save_stack_pointer ();
4026 /* In function-at-a-time mode, variable_size doesn't expand this,
4027 so do it now. */
4028 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
4029 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
4030 const0_rtx, VOIDmode, 0);
4032 /* Compute the variable's size, in bytes. */
4033 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
4034 free_temp_slots ();
4036 /* Allocate space on the stack for the variable. Note that
4037 DECL_ALIGN says how the variable is to be aligned and we
4038 cannot use it to conclude anything about the alignment of
4039 the size. */
4040 address = allocate_dynamic_stack_space (size, NULL_RTX,
4041 TYPE_ALIGN (TREE_TYPE (decl)));
4043 /* Reference the variable indirectly through that rtx. */
4044 x = gen_rtx_MEM (DECL_MODE (decl), address);
4045 set_mem_attributes (x, decl, 1);
4046 SET_DECL_RTL (decl, x);
4049 /* Indicate the alignment we actually gave this variable. */
4050 #ifdef STACK_BOUNDARY
4051 DECL_ALIGN (decl) = STACK_BOUNDARY;
4052 #else
4053 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
4054 #endif
4055 DECL_USER_ALIGN (decl) = 0;
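/* Summarizing the cases above (a sketch; `h' and its locals are
   hypothetical):

     void h (int n)
     {
       int i;          -- (reg:SI ...) if it qualifies for a register
       double d[16];   -- (mem:BLK ...), a fixed-size stack slot
       int vla[n];     -- (mem ...) through an address computed by
                          allocate_dynamic_stack_space
     }
 */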
4059 /* Emit code to perform the initialization of a declaration DECL. */
4061 void
4062 expand_decl_init (tree decl)
4064 int was_used = TREE_USED (decl);
4066 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
4067 for static decls. */
4068 if (TREE_CODE (decl) == CONST_DECL
4069 || TREE_STATIC (decl))
4070 return;
4072 /* Compute and store the initial value now. */
4074 push_temp_slots ();
4076 if (DECL_INITIAL (decl) == error_mark_node)
4078 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
4080 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
4081 || code == POINTER_TYPE || code == REFERENCE_TYPE)
4082 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
4083 0);
4084 emit_queue ();
4086 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
4088 emit_line_note (DECL_SOURCE_LOCATION (decl));
4089 expand_assignment (decl, DECL_INITIAL (decl), 0);
4090 emit_queue ();
4093 /* Don't let the initialization count as "using" the variable. */
4094 TREE_USED (decl) = was_used;
4096 /* Free any temporaries we made while initializing the decl. */
4097 preserve_temp_slots (NULL_RTX);
4098 free_temp_slots ();
4099 pop_temp_slots ();
4102 /* CLEANUP is an expression to be executed at exit from this binding contour;
4103 for example, in C++, it might call the destructor for this variable.
4105 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4106 CLEANUP multiple times, and have the correct semantics. This
4107 happens in exception handling, for gotos, returns, breaks that
4108 leave the current scope.
4110 If CLEANUP is nonzero and DECL is zero, we record a cleanup
4111 that is not associated with any particular variable. */
4113 int
4114 expand_decl_cleanup (tree decl, tree cleanup)
4116 struct nesting *thisblock;
4118 /* Error if we are not in any block. */
4119 if (cfun == 0 || block_stack == 0)
4120 return 0;
4122 thisblock = block_stack;
4124 /* Record the cleanup if there is one. */
4126 if (cleanup != 0)
4128 tree t;
4129 rtx seq;
4130 tree *cleanups = &thisblock->data.block.cleanups;
4131 int cond_context = conditional_context ();
4133 if (cond_context)
4135 rtx flag = gen_reg_rtx (word_mode);
4136 rtx set_flag_0;
4137 tree cond;
4139 start_sequence ();
4140 emit_move_insn (flag, const0_rtx);
4141 set_flag_0 = get_insns ();
4142 end_sequence ();
4144 thisblock->data.block.last_unconditional_cleanup
4145 = emit_insn_after (set_flag_0,
4146 thisblock->data.block.last_unconditional_cleanup);
4148 emit_move_insn (flag, const1_rtx);
4150 cond = build_decl (VAR_DECL, NULL_TREE,
4151 (*lang_hooks.types.type_for_mode) (word_mode, 1));
4152 SET_DECL_RTL (cond, flag);
4154 /* Conditionalize the cleanup. */
4155 cleanup = build (COND_EXPR, void_type_node,
4156 (*lang_hooks.truthvalue_conversion) (cond),
4157 cleanup, integer_zero_node);
4158 cleanup = fold (cleanup);
4160 cleanups = &thisblock->data.block.cleanups;
4163 cleanup = unsave_expr (cleanup);
4165 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4167 if (! cond_context)
4168 /* If this block has a cleanup, it belongs in stack_block_stack. */
4169 stack_block_stack = thisblock;
4171 if (cond_context)
4173 start_sequence ();
4176 if (! using_eh_for_cleanups_p)
4177 TREE_ADDRESSABLE (t) = 1;
4178 else
4179 expand_eh_region_start ();
4181 if (cond_context)
4183 seq = get_insns ();
4184 end_sequence ();
4185 if (seq)
4186 thisblock->data.block.last_unconditional_cleanup
4187 = emit_insn_after (seq,
4188 thisblock->data.block.last_unconditional_cleanup);
4190 else
4192 thisblock->data.block.last_unconditional_cleanup
4193 = get_last_insn ();
4194 /* When we insert instructions after the last unconditional cleanup,
4195 we don't adjust last_insn. That means that a later add_insn will
4196 clobber the instructions we've just added. The easiest way to
4197 fix this is to just insert another instruction here, so that the
4198 instructions inserted after the last unconditional cleanup are
4199 never the last instruction. */
4200 emit_note (NOTE_INSN_DELETED);
4203 return 1;
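/* The classic client is a C++ local whose type has a destructor; as
   a sketch,

     { T t; ... }

   is expanded with expand_decl_cleanup (t, <call to T's destructor>),
   so the destructor runs on every exit from the contour: normal fall
   through, goto, return and -- via the EH region opened above --
   exceptions.  */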
4206 /* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
4207 is thrown. */
4209 int
4210 expand_decl_cleanup_eh (tree decl, tree cleanup, int eh_only)
4212 int ret = expand_decl_cleanup (decl, cleanup);
4213 if (cleanup && ret)
4215 tree node = block_stack->data.block.cleanups;
4216 CLEANUP_EH_ONLY (node) = eh_only;
4218 return ret;
4221 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4222 DECL_ELTS is the list of elements that belong to DECL's type.
4223 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4225 void
4226 expand_anon_union_decl (tree decl, tree cleanup, tree decl_elts)
4228 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4229 rtx x;
4230 tree t;
4232 /* If any of the elements are addressable, so is the entire union. */
4233 for (t = decl_elts; t; t = TREE_CHAIN (t))
4234 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4236 TREE_ADDRESSABLE (decl) = 1;
4237 break;
4240 expand_decl (decl);
4241 expand_decl_cleanup (decl, cleanup);
4242 x = DECL_RTL (decl);
4244 /* Go through the elements, assigning RTL to each. */
4245 for (t = decl_elts; t; t = TREE_CHAIN (t))
4247 tree decl_elt = TREE_VALUE (t);
4248 tree cleanup_elt = TREE_PURPOSE (t);
4249 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4251 /* If any of the elements are used, so is the entire
4252 union. */
4253 if (TREE_USED (decl_elt))
4254 TREE_USED (decl) = 1;
4256 /* Propagate the union's alignment to the elements. */
4257 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4258 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4260 /* If the element has BLKmode and the union doesn't, the union is
4261 aligned such that the element doesn't need to have BLKmode, so
4262 change the element's mode to the appropriate one for its size. */
4263 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4264 DECL_MODE (decl_elt) = mode
4265 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4267 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4268 instead create a new MEM rtx with the proper mode. */
4269 if (GET_CODE (x) == MEM)
4271 if (mode == GET_MODE (x))
4272 SET_DECL_RTL (decl_elt, x);
4273 else
4274 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
4276 else if (GET_CODE (x) == REG)
4278 if (mode == GET_MODE (x))
4279 SET_DECL_RTL (decl_elt, x);
4280 else
4281 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
4283 else
4284 abort ();
4286 /* Record the cleanup if there is one. */
4288 if (cleanup != 0)
4289 thisblock->data.block.cleanups
4290 = tree_cons (decl_elt, cleanup_elt,
4291 thisblock->data.block.cleanups);
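/* For example, given the anonymous union

     union { int i; float f; };

   one piece of storage is created for DECL, and `i' and `f' are
   given RTL referring to that same storage in their own modes, per
   the MEM/REG cases above.  */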
4295 /* Expand a list of cleanups LIST.
4296 Elements may be expressions or may be nested lists.
4298 If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
4299 goto and handle protection regions specially in that case.
4301 If REACHABLE, we emit code; otherwise we just inform the exception handling
4302 code about this finalization. */
4304 static void
4305 expand_cleanups (tree list, int in_fixup, int reachable)
4307 tree tail;
4308 for (tail = list; tail; tail = TREE_CHAIN (tail))
4309 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4310 expand_cleanups (TREE_VALUE (tail), in_fixup, reachable);
4311 else
4313 if (! in_fixup && using_eh_for_cleanups_p)
4314 expand_eh_region_end_cleanup (TREE_VALUE (tail));
4316 if (reachable && !CLEANUP_EH_ONLY (tail))
4318 /* Cleanups may be run multiple times. For example,
4319 when exiting a binding contour, we expand the
4320 cleanups associated with that contour. When a goto
4321 within that binding contour has a target outside that
4322 contour, it will expand all cleanups from its scope to
4323 the target. Though the cleanups are expanded multiple
4324 times, the control paths are non-overlapping so the
4325 cleanups will not be executed twice. */
4327 /* We may need to protect from outer cleanups. */
4328 if (in_fixup && using_eh_for_cleanups_p)
4330 expand_eh_region_start ();
4332 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4334 expand_eh_region_end_fixup (TREE_VALUE (tail));
4336 else
4337 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4339 free_temp_slots ();
4344 /* Mark the context we are emitting RTL for as a conditional
4345 context, so that any cleanup actions we register with
4346 expand_decl_cleanup will be properly conditionalized when those
4347 cleanup actions are later performed. Must be called before any
4348 expression (tree) is expanded that is within a conditional context. */
4350 void
4351 start_cleanup_deferral (void)
4353 /* block_stack can be NULL if we are inside the parameter list. It is
4354 OK to do nothing, because cleanups aren't possible here. */
4355 if (block_stack)
4356 ++block_stack->data.block.conditional_code;
4359 /* Mark the end of a conditional region of code. Because cleanup
4360 deferrals may be nested, we may still be in a conditional region
4361 after we end the currently deferred cleanups; only after we end all
4362 deferred cleanups are we back in unconditional code. */
4364 void
4365 end_cleanup_deferral (void)
4367 /* block_stack can be NULL if we are inside the parameter list. It is
4368 OK to do nothing, because cleanups aren't possible here. */
4369 if (block_stack)
4370 --block_stack->data.block.conditional_code;
4373 tree
4374 last_cleanup_this_contour (void)
4376 if (block_stack == 0)
4377 return 0;
4379 return block_stack->data.block.cleanups;
4382 /* Return 1 if there are any pending cleanups at this point.
4383 Check the current contour as well as contours that enclose
4384 the current contour. */
4386 int
4387 any_pending_cleanups (void)
4389 struct nesting *block;
4391 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4392 return 0;
4394 if (block_stack->data.block.cleanups != NULL)
4395 return 1;
4397 if (block_stack->data.block.outer_cleanups == 0)
4398 return 0;
4400 for (block = block_stack->next; block; block = block->next)
4401 if (block->data.block.cleanups != 0)
4402 return 1;
4404 return 0;
4407 /* Enter a case (Pascal) or switch (C) statement.
4408 Push a block onto case_stack and nesting_stack
4409 to accumulate the case-labels that are seen
4410 and to record the labels generated for the statement.
4412 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4413 Otherwise, this construct is transparent for `exit_something'.
4415 EXPR is the index-expression to be dispatched on.
4416 TYPE is its nominal type. We could simply convert EXPR to this type,
4417 but instead we take short cuts. */
4419 void
4420 expand_start_case (int exit_flag, tree expr, tree type,
4421 const char *printname)
4423 struct nesting *thiscase = ALLOC_NESTING ();
4425 /* Make an entry on case_stack for the case we are entering. */
4427 thiscase->desc = CASE_NESTING;
4428 thiscase->next = case_stack;
4429 thiscase->all = nesting_stack;
4430 thiscase->depth = ++nesting_depth;
4431 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4432 thiscase->data.case_stmt.case_list = 0;
4433 thiscase->data.case_stmt.index_expr = expr;
4434 thiscase->data.case_stmt.nominal_type = type;
4435 thiscase->data.case_stmt.default_label = 0;
4436 thiscase->data.case_stmt.printname = printname;
4437 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4438 case_stack = thiscase;
4439 nesting_stack = thiscase;
4441 do_pending_stack_adjust ();
4442 emit_queue ();
4444 /* Make sure case_stmt.start points to something that won't
4445 need any transformation before expand_end_case. */
4446 if (GET_CODE (get_last_insn ()) != NOTE)
4447 emit_note (NOTE_INSN_DELETED);
4449 thiscase->data.case_stmt.start = get_last_insn ();
4451 start_cleanup_deferral ();
4454 /* Start a "dummy case statement" within which case labels are invalid
4455 and are not connected to any larger real case statement.
4456 This can be used if you don't want to let a case statement jump
4457 into the middle of certain kinds of constructs. */
4459 void
4460 expand_start_case_dummy (void)
4462 struct nesting *thiscase = ALLOC_NESTING ();
4464 /* Make an entry on case_stack for the dummy. */
4466 thiscase->desc = CASE_NESTING;
4467 thiscase->next = case_stack;
4468 thiscase->all = nesting_stack;
4469 thiscase->depth = ++nesting_depth;
4470 thiscase->exit_label = 0;
4471 thiscase->data.case_stmt.case_list = 0;
4472 thiscase->data.case_stmt.start = 0;
4473 thiscase->data.case_stmt.nominal_type = 0;
4474 thiscase->data.case_stmt.default_label = 0;
4475 case_stack = thiscase;
4476 nesting_stack = thiscase;
4477 start_cleanup_deferral ();
4480 static void
4481 check_seenlabel (void)
4483 /* If this is the first label, warn if any insns have been emitted. */
4484 if (case_stack->data.case_stmt.line_number_status >= 0)
4486 rtx insn;
4488 restore_line_number_status
4489 (case_stack->data.case_stmt.line_number_status);
4490 case_stack->data.case_stmt.line_number_status = -1;
4492 for (insn = case_stack->data.case_stmt.start;
4493 insn;
4494 insn = NEXT_INSN (insn))
4496 if (GET_CODE (insn) == CODE_LABEL)
4497 break;
4498 if (GET_CODE (insn) != NOTE
4499 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4501 do
4502 insn = PREV_INSN (insn);
4503 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4505 /* If insn is zero, then there must have been a syntax error. */
4506 if (insn)
4508 location_t locus;
4509 locus.file = NOTE_SOURCE_FILE (insn);
4510 locus.line = NOTE_LINE_NUMBER (insn);
4511 warning ("%Hunreachable code at beginning of %s", &locus,
4512 case_stack->data.case_stmt.printname);
4514 break;
4520 /* Accumulate one case or default label inside a case or switch statement.
4521 VALUE is the value of the case (a null pointer, for a default label).
4522 The function CONVERTER, when applied to arguments T and V,
4523 converts the value V to the type T.
4525 If not currently inside a case or switch statement, return 1 and do
4526 nothing. The caller will print a language-specific error message.
4527 If VALUE is a duplicate or overlaps, return 2 and do nothing
4528 except store the (first) duplicate node in *DUPLICATE.
4529 If VALUE is out of range, return 3 and do nothing.
4530 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4531 Return 0 on success.
4533 Extended to handle range statements. */
4535 int
4536 pushcase (tree value, tree (*converter) (tree, tree), tree label,
4537 tree *duplicate)
4539 tree index_type;
4540 tree nominal_type;
4542 /* Fail if not inside a real case statement. */
4543 if (! (case_stack && case_stack->data.case_stmt.start))
4544 return 1;
4546 if (stack_block_stack
4547 && stack_block_stack->depth > case_stack->depth)
4548 return 5;
4550 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4551 nominal_type = case_stack->data.case_stmt.nominal_type;
4553 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4554 if (index_type == error_mark_node)
4555 return 0;
4557 /* Convert VALUE to the type in which the comparisons are nominally done. */
4558 if (value != 0)
4559 value = (*converter) (nominal_type, value);
4561 check_seenlabel ();
4563 /* Fail if this value is out of range for the actual type of the index
4564 (which may be narrower than NOMINAL_TYPE). */
4565 if (value != 0
4566 && (TREE_CONSTANT_OVERFLOW (value)
4567 || ! int_fits_type_p (value, index_type)))
4568 return 3;
4570 return add_case_node (value, value, label, duplicate);
4573 /* Like pushcase but this case applies to all values between VALUE1 and
4574 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4575 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4576 starts at VALUE1 and ends at the highest value of the index type.
4577 If both are NULL, this case applies to all values.
4579 The return value is the same as that of pushcase but there is one
4580 additional error code: 4 means the specified range was empty. */
4582 int
4583 pushcase_range (tree value1, tree value2, tree (*converter) (tree, tree),
4584 tree label, tree *duplicate)
4586 tree index_type;
4587 tree nominal_type;
4589 /* Fail if not inside a real case statement. */
4590 if (! (case_stack && case_stack->data.case_stmt.start))
4591 return 1;
4593 if (stack_block_stack
4594 && stack_block_stack->depth > case_stack->depth)
4595 return 5;
4597 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4598 nominal_type = case_stack->data.case_stmt.nominal_type;
4600 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4601 if (index_type == error_mark_node)
4602 return 0;
4604 check_seenlabel ();
4606 /* Convert VALUEs to type in which the comparisons are nominally done
4607 and replace any unspecified value with the corresponding bound. */
4608 if (value1 == 0)
4609 value1 = TYPE_MIN_VALUE (index_type);
4610 if (value2 == 0)
4611 value2 = TYPE_MAX_VALUE (index_type);
4613 /* Fail if the range is empty. Do this before any conversion since
4614 we want to allow out-of-range empty ranges. */
4615 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4616 return 4;
4618 /* If the max was unbounded, use the max of the nominal_type we are
4619 converting to. Do this after the < check above to suppress false
4620 positives. */
4621 if (value2 == 0)
4622 value2 = TYPE_MAX_VALUE (nominal_type);
4624 value1 = (*converter) (nominal_type, value1);
4625 value2 = (*converter) (nominal_type, value2);
4627 /* Fail if these values are out of range. */
4628 if (TREE_CONSTANT_OVERFLOW (value1)
4629 || ! int_fits_type_p (value1, index_type))
4630 return 3;
4632 if (TREE_CONSTANT_OVERFLOW (value2)
4633 || ! int_fits_type_p (value2, index_type))
4634 return 3;
4636 return add_case_node (value1, value2, label, duplicate);
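/* The GNU C case-range extension is the typical caller; as a sketch,

     case 1 ... 5:

   arrives here as pushcase_range (1, 5, converter, label, &duplicate),
   while an empty range such as `case 5 ... 1:' fails with return
   code 4 above.  */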
4639 /* Do the actual insertion of a case label for pushcase and pushcase_range
4640 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4641 slowdown for large switch statements. */
4643 static int
4644 add_case_node (tree low, tree high, tree label, tree *duplicate)
4646 struct case_node *p, **q, *r;
4648 /* If there's no HIGH value, then this is not a case range; it's
4649 just a simple case label. But that's just a degenerate case
4650 range. */
4651 if (!high)
4652 high = low;
4654 /* Handle default labels specially. */
4655 if (!high && !low)
4657 if (case_stack->data.case_stmt.default_label != 0)
4659 *duplicate = case_stack->data.case_stmt.default_label;
4660 return 2;
4662 case_stack->data.case_stmt.default_label = label;
4663 expand_label (label);
4664 return 0;
4667 q = &case_stack->data.case_stmt.case_list;
4668 p = *q;
4670 while ((r = *q))
4672 p = r;
4674 /* Keep going past elements distinctly greater than HIGH. */
4675 if (tree_int_cst_lt (high, p->low))
4676 q = &p->left;
4678 /* or distinctly less than LOW. */
4679 else if (tree_int_cst_lt (p->high, low))
4680 q = &p->right;
4682 else
4684 /* We have an overlap; this is an error. */
4685 *duplicate = p->code_label;
4686 return 2;
4690 /* Add this label to the chain, and succeed. */
4692 r = ggc_alloc (sizeof (struct case_node));
4693 r->low = low;
4695 /* If the bounds are equal, turn this into the one-value case. */
4696 if (tree_int_cst_equal (low, high))
4697 r->high = r->low;
4698 else
4699 r->high = high;
4701 r->code_label = label;
4702 expand_label (label);
4704 *q = r;
4705 r->parent = p;
4706 r->left = 0;
4707 r->right = 0;
4708 r->balance = 0;
4710 while (p)
4712 struct case_node *s;
4714 if (r == p->left)
4716 int b;
4718 if (! (b = p->balance))
4719 /* Growth propagation from left side. */
4720 p->balance = -1;
4721 else if (b < 0)
4723 if (r->balance < 0)
4725 /* R-Rotation */
4726 if ((p->left = s = r->right))
4727 s->parent = p;
4729 r->right = p;
4730 p->balance = 0;
4731 r->balance = 0;
4732 s = p->parent;
4733 p->parent = r;
4735 if ((r->parent = s))
4737 if (s->left == p)
4738 s->left = r;
4739 else
4740 s->right = r;
4742 else
4743 case_stack->data.case_stmt.case_list = r;
4745 else
4746 /* r->balance == +1 */
4748 /* LR-Rotation */
4750 int b2;
4751 struct case_node *t = r->right;
4753 if ((p->left = s = t->right))
4754 s->parent = p;
4756 t->right = p;
4757 if ((r->right = s = t->left))
4758 s->parent = r;
4760 t->left = r;
4761 b = t->balance;
4762 b2 = b < 0;
4763 p->balance = b2;
4764 b2 = -b2 - b;
4765 r->balance = b2;
4766 t->balance = 0;
4767 s = p->parent;
4768 p->parent = t;
4769 r->parent = t;
4771 if ((t->parent = s))
4773 if (s->left == p)
4774 s->left = t;
4775 else
4776 s->right = t;
4778 else
4779 case_stack->data.case_stmt.case_list = t;
4781 break;
4784 else
4786 /* p->balance == +1; growth of left side balances the node. */
4787 p->balance = 0;
4788 break;
4791 else
4792 /* r == p->right */
4794 int b;
4796 if (! (b = p->balance))
4797 /* Growth propagation from right side. */
4798 p->balance++;
4799 else if (b > 0)
4801 if (r->balance > 0)
4803 /* L-Rotation */
4805 if ((p->right = s = r->left))
4806 s->parent = p;
4808 r->left = p;
4809 p->balance = 0;
4810 r->balance = 0;
4811 s = p->parent;
4812 p->parent = r;
4813 if ((r->parent = s))
4815 if (s->left == p)
4816 s->left = r;
4817 else
4818 s->right = r;
4821 else
4822 case_stack->data.case_stmt.case_list = r;
4825 else
4826 /* r->balance == -1 */
4828 /* RL-Rotation */
4829 int b2;
4830 struct case_node *t = r->left;
4832 if ((p->right = s = t->left))
4833 s->parent = p;
4835 t->left = p;
4837 if ((r->left = s = t->right))
4838 s->parent = r;
4840 t->right = r;
4841 b = t->balance;
4842 b2 = b < 0;
4843 r->balance = b2;
4844 b2 = -b2 - b;
4845 p->balance = b2;
4846 t->balance = 0;
4847 s = p->parent;
4848 p->parent = t;
4849 r->parent = t;
4851 if ((t->parent = s))
4853 if (s->left == p)
4854 s->left = t;
4855 else
4856 s->right = t;
4859 else
4860 case_stack->data.case_stmt.case_list = t;
4862 break;
4864 else
4866 /* p->balance == -1; growth of right side balances the node. */
4867 p->balance = 0;
4868 break;
4872 r = p;
4873 p = p->parent;
4876 return 0;
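/* The rebalancing loop above is the standard AVL insertion fix-up.
   For instance, the R-Rotation arm performs

           p                 r
          / \               / \
         r   c     ==>     a   p
        / \                   / \
       a   s                 s   c

   after which both P->balance and R->balance are reset to 0, exactly
   as the code does.  */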
4879 /* Returns the number of possible values of TYPE.
4880 Returns -1 if the number is unknown, variable, or if the number does not
4881 fit in a HOST_WIDE_INT.
4882 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4883 do not increase monotonically (there may be duplicates);
4884 to 1 if the values increase monotonically, but not always by 1;
4885 otherwise sets it to 0. */
4887 HOST_WIDE_INT
4888 all_cases_count (tree type, int *sparseness)
4890 tree t;
4891 HOST_WIDE_INT count, minval, lastval;
4893 *sparseness = 0;
4895 switch (TREE_CODE (type))
4897 case BOOLEAN_TYPE:
4898 count = 2;
4899 break;
4901 case CHAR_TYPE:
4902 count = 1 << BITS_PER_UNIT;
4903 break;
4905 default:
4906 case INTEGER_TYPE:
4907 if (TYPE_MAX_VALUE (type) != 0
4908 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4909 TYPE_MIN_VALUE (type))))
4910 && 0 != (t = fold (build (PLUS_EXPR, type, t,
4911 convert (type, integer_one_node))))
4912 && host_integerp (t, 1))
4913 count = tree_low_cst (t, 1);
4914 else
4915 return -1;
4916 break;
4918 case ENUMERAL_TYPE:
4919 /* Don't waste time with enumeral types with huge values. */
4920 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4921 || TYPE_MAX_VALUE (type) == 0
4922 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4923 return -1;
4925 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
4926 count = 0;
4928 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4930 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4932 if (*sparseness == 2 || thisval <= lastval)
4933 *sparseness = 2;
4934 else if (thisval != minval + count)
4935 *sparseness = 1;
4937 lastval = thisval;
4938 count++;
4942 return count;
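/* For example, for a signed 8-bit index type the INTEGER_TYPE arm
   above computes 127 - (-128) + 1 = 256 possible values; for an
   ENUMERAL_TYPE the result is the number of enumerators, with
   *SPARSENESS describing how their values are distributed.  */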
4945 #define BITARRAY_TEST(ARRAY, INDEX) \
4946 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4947 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4948 #define BITARRAY_SET(ARRAY, INDEX) \
4949 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4950 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
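/* E.g., with 8-bit chars, BITARRAY_SET (SEEN, 9) sets bit 1 of
   SEEN[1], and BITARRAY_TEST (SEEN, 9) then yields nonzero for
   index 9 and index 9 only.  */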
4952 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4953 with the case values we have seen, assuming the case expression
4954 has the given TYPE.
4955 SPARSENESS is as determined by all_cases_count.
4957 The time needed is proportional to COUNT, unless
4958 SPARSENESS is 2, in which case quadratic time is needed. */
4960 void
4961 mark_seen_cases (tree type, unsigned char *cases_seen, HOST_WIDE_INT count,
4962 int sparseness)
4964 tree next_node_to_try = NULL_TREE;
4965 HOST_WIDE_INT next_node_offset = 0;
4967 struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4968 tree val = make_node (INTEGER_CST);
4970 TREE_TYPE (val) = type;
4971 if (! root)
4972 /* Do nothing. */
4974 else if (sparseness == 2)
4976 tree t;
4977 unsigned HOST_WIDE_INT xlo;
4979 /* This less efficient loop is only needed to handle
4980 duplicate case values (multiple enum constants
4981 with the same value). */
4982 TREE_TYPE (val) = TREE_TYPE (root->low);
4983 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4984 t = TREE_CHAIN (t), xlo++)
4986 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4987 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4988 n = root;
4989 do
4991 /* Keep going past elements distinctly greater than VAL. */
4992 if (tree_int_cst_lt (val, n->low))
4993 n = n->left;
4995 /* or distinctly less than VAL. */
4996 else if (tree_int_cst_lt (n->high, val))
4997 n = n->right;
4999 else
5001 /* We have found a matching range. */
5002 BITARRAY_SET (cases_seen, xlo);
5003 break;
5006 while (n);
5009 else
5011 if (root->left)
5012 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5014 for (n = root; n; n = n->right)
5016 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5017 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5018 while (! tree_int_cst_lt (n->high, val))
5020 /* Calculate (into xlo) the "offset" of the integer (val).
5021 The element with lowest value has offset 0, the next smallest
5022 element has offset 1, etc. */
5024 unsigned HOST_WIDE_INT xlo;
5025 HOST_WIDE_INT xhi;
5026 tree t;
5028 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5030 /* The TYPE_VALUES will be in increasing order, so
5031 start searching where we last ended. */
5032 t = next_node_to_try;
5033 xlo = next_node_offset;
5034 xhi = 0;
5035 for (;;)
5037 if (t == NULL_TREE)
5039 t = TYPE_VALUES (type);
5040 xlo = 0;
5042 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5044 next_node_to_try = TREE_CHAIN (t);
5045 next_node_offset = xlo + 1;
5046 break;
5048 xlo++;
5049 t = TREE_CHAIN (t);
5050 if (t == next_node_to_try)
5052 xlo = -1;
5053 break;
5057 else
5059 t = TYPE_MIN_VALUE (type);
5060 if (t)
5061 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5062 &xlo, &xhi);
5063 else
5064 xlo = xhi = 0;
5065 add_double (xlo, xhi,
5066 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5067 &xlo, &xhi);
5070 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
5071 BITARRAY_SET (cases_seen, xlo);
5073 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5074 1, 0,
5075 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5081 /* Given a switch statement with an expression that is an enumeration
5082 type, warn if any of the enumeration type's literals are not
5083 covered by the case expressions of the switch. Also, warn if there
5084 are any extra switch cases that are *not* elements of the
5085 enumerated type.
5087 Historical note:
5089 At one stage this function would: ``If all enumeration literals
5090 were covered by the case expressions, turn one of the expressions
5091 into the default expression since it should not be possible to fall
5092 through such a switch.''
5094 That code has since been removed as: ``This optimization is
5095 disabled because it causes valid programs to fail. ANSI C does not
5096 guarantee that an expression with enum type will have a value that
5097 is the same as one of the enumeration literals.'' */
5099 void
5100 check_for_full_enumeration_handling (tree type)
5102 struct case_node *n;
5103 tree chain;
5105 /* Sparseness of the selector type, as computed by all_cases_count. */
5106 int sparseness = 0;
5108 /* The number of possible selector values. */
5109 HOST_WIDE_INT size;
5111 /* For each possible selector value, a one iff it has been matched
5112 by a case value alternative. */
5113 unsigned char *cases_seen;
5115 /* The allocated size of cases_seen, in chars. */
5116 HOST_WIDE_INT bytes_needed;
5118 size = all_cases_count (type, &sparseness);
5119 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5121 if (size > 0 && size < 600000
5122 /* We deliberately use calloc here, not xcalloc, so that we can suppress
5123 this optimization if we don't have enough memory rather than
5124 aborting, as xmalloc would do. */
5125 && (cases_seen = really_call_calloc (bytes_needed, 1)) != NULL)
5127 HOST_WIDE_INT i;
5128 tree v = TYPE_VALUES (type);
5130 /* The time complexity of this code is normally O(N), where
5131 N is the number of members in the enumerated type.
5132 However, if type is an ENUMERAL_TYPE whose values do not
5133 increase monotonically, O(N*log(N)) time may be needed. */
5135 mark_seen_cases (type, cases_seen, size, sparseness);
5137 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5138 if (BITARRAY_TEST (cases_seen, i) == 0)
5139 warning ("enumeration value `%s' not handled in switch",
5140 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5142 free (cases_seen);
5145 /* Now we go the other way around; we warn if there are case
5146 expressions that don't correspond to enumerators. This can
5147 occur since C and C++ don't enforce type-checking of
5148 assignments to enumeration variables. */
5150 if (case_stack->data.case_stmt.case_list
5151 && case_stack->data.case_stmt.case_list->left)
5152 case_stack->data.case_stmt.case_list
5153 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5154 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5156 for (chain = TYPE_VALUES (type);
5157 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5158 chain = TREE_CHAIN (chain))
5159 ;
5161 if (!chain)
5163 if (TYPE_NAME (type) == 0)
5164 warning ("case value `%ld' not in enumerated type",
5165 (long) TREE_INT_CST_LOW (n->low));
5166 else
5167 warning ("case value `%ld' not in enumerated type `%s'",
5168 (long) TREE_INT_CST_LOW (n->low),
5169 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5170 == IDENTIFIER_NODE)
5171 ? TYPE_NAME (type)
5172 : DECL_NAME (TYPE_NAME (type))));
5174 if (!tree_int_cst_equal (n->low, n->high))
5176 for (chain = TYPE_VALUES (type);
5177 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5178 chain = TREE_CHAIN (chain))
5179 ;
5181 if (!chain)
5183 if (TYPE_NAME (type) == 0)
5184 warning ("case value `%ld' not in enumerated type",
5185 (long) TREE_INT_CST_LOW (n->high));
5186 else
5187 warning ("case value `%ld' not in enumerated type `%s'",
5188 (long) TREE_INT_CST_LOW (n->high),
5189 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5190 == IDENTIFIER_NODE)
5191 ? TYPE_NAME (type)
5192 : DECL_NAME (TYPE_NAME (type))));
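/* For example (a sketch), with enum e {RED, GREEN, BLUE}:

     switch (v) { case RED: ... }            -- warns that GREEN and
                                                BLUE are not handled
     switch (v) { case RED: case 7: ... }    -- warns that case value
                                                7 is not in enum `e'
 */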
5199 /* Maximum number of case bit tests. */
5200 #define MAX_CASE_BIT_TESTS 3
5202 /* By default, enable case bit tests on targets with a word_mode shift insn. */
5203 #ifndef CASE_USE_BIT_TESTS
5204 #define CASE_USE_BIT_TESTS (ashl_optab->handlers[word_mode].insn_code \
5205 != CODE_FOR_nothing)
5206 #endif
5209 /* A case_bit_test represents a set of case nodes that may be
5210 selected from using a bit-wise comparison. HI and LO hold
5211 the integer to be tested against, LABEL contains the label
5212 to jump to upon success and BITS counts the number of case
5213 nodes handled by this test, typically the number of bits
5214 set in HI:LO. */
5216 struct case_bit_test
5218 HOST_WIDE_INT hi;
5219 HOST_WIDE_INT lo;
5220 rtx label;
5221 int bits;
5224 /* Determine whether "1 << x" is relatively cheap in word_mode. */
5226 static
5227 bool lshift_cheap_p (void)
5229 static bool init = false;
5230 static bool cheap = true;
5232 if (!init)
5234 rtx reg = gen_rtx_REG (word_mode, 10000);
5235 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
5236 cheap = cost < COSTS_N_INSNS (3);
5237 init = true;
5240 return cheap;
5243 /* Comparison function for qsort to order bit tests by decreasing
5244 number of case nodes, i.e. the node with the most cases gets
5245 tested first. */
5247 static
5248 int case_bit_test_cmp (const void *p1, const void *p2)
5250 const struct case_bit_test *d1 = p1;
5251 const struct case_bit_test *d2 = p2;
5253 return d2->bits - d1->bits;
5256 /* Expand a switch statement by a short sequence of bit-wise
5257 comparisons. "switch(x)" is effectively converted into
5258 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
5259 integer constants.
5261 INDEX_EXPR is the value being switched on, which is of
5262 type INDEX_TYPE. MINVAL is the lowest case value in
5263 the case nodes, of INDEX_TYPE type, and RANGE is highest
5264 value minus MINVAL, also of type INDEX_TYPE. NODES is
5265 the set of case nodes, and DEFAULT_LABEL is the label to
5266 branch to should none of the cases match.
5268 There *MUST* be MAX_CASE_BIT_TESTS or fewer unique case
5269 node targets. */
5271 static void
5272 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
5273 tree range, case_node_ptr nodes, rtx default_label)
5275 struct case_bit_test test[MAX_CASE_BIT_TESTS];
5276 enum machine_mode mode;
5277 rtx expr, index, label;
5278 unsigned int i,j,lo,hi;
5279 struct case_node *n;
5280 unsigned int count;
5282 count = 0;
5283 for (n = nodes; n; n = n->right)
5285 label = label_rtx (n->code_label);
5286 for (i = 0; i < count; i++)
5287 if (same_case_target_p (label, test[i].label))
5288 break;
5290 if (i == count)
5292 if (count >= MAX_CASE_BIT_TESTS)
5293 abort ();
5294 test[i].hi = 0;
5295 test[i].lo = 0;
5296 test[i].label = label;
5297 test[i].bits = 1;
5298 count++;
5300 else
5301 test[i].bits++;
5303 lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5304 n->low, minval)), 1);
5305 hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5306 n->high, minval)), 1);
5307 for (j = lo; j <= hi; j++)
5308 if (j >= HOST_BITS_PER_WIDE_INT)
5309 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_WIDE_INT);
5310 else
5311 test[i].lo |= (HOST_WIDE_INT) 1 << j;
5314 qsort (test, count, sizeof(*test), case_bit_test_cmp);
5316 index_expr = fold (build (MINUS_EXPR, index_type,
5317 convert (index_type, index_expr),
5318 convert (index_type, minval)));
5319 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5320 emit_queue ();
5321 index = protect_from_queue (index, 0);
5322 do_pending_stack_adjust ();
5324 mode = TYPE_MODE (index_type);
5325 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
5326 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
5327 default_label);
5329 index = convert_to_mode (word_mode, index, 0);
5330 index = expand_binop (word_mode, ashl_optab, const1_rtx,
5331 index, NULL_RTX, 1, OPTAB_WIDEN);
5333 for (i = 0; i < count; i++)
5335 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
5336 expr = expand_binop (word_mode, and_optab, index, expr,
5337 NULL_RTX, 1, OPTAB_WIDEN);
5338 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
5339 word_mode, 1, test[i].label);
5342 emit_jump (default_label);
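/* As a concrete sketch (assuming a 32-bit word), the C switch

     switch (x) { case 0: case 2: case 4: f (); break;
                  default: g (); }

   qualifies (uniq == 1, count == 3) and is emitted, in effect, as

     if ((unsigned) x > 4) goto dflt;      -- the GTU range check
     if ((1 << x) & 0x15) goto f_cases;    -- bits 0, 2 and 4 set
     goto dflt;
 */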
5345 /* Terminate a case (Pascal) or switch (C) statement
5346 in which ORIG_INDEX is the expression to be tested.
5347 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
5348 type as given in the source before any compiler conversions.
5349 Generate the code to test it and jump to the right place. */
5351 void
5352 expand_end_case_type (tree orig_index, tree orig_type)
5354 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
5355 rtx default_label = 0;
5356 struct case_node *n, *m;
5357 unsigned int count, uniq;
5358 rtx index;
5359 rtx table_label;
5360 int ncases;
5361 rtx *labelvec;
5362 int i;
5363 rtx before_case, end, lab;
5364 struct nesting *thiscase = case_stack;
5365 tree index_expr, index_type;
5366 bool exit_done = false;
5367 int unsignedp;
5369 /* Don't crash due to previous errors. */
5370 if (thiscase == NULL)
5371 return;
5373 index_expr = thiscase->data.case_stmt.index_expr;
5374 index_type = TREE_TYPE (index_expr);
5375 unsignedp = TREE_UNSIGNED (index_type);
5376 if (orig_type == NULL)
5377 orig_type = TREE_TYPE (orig_index);
5379 do_pending_stack_adjust ();
5381 /* This might get a spurious warning in the presence of a syntax error;
5382 it could be fixed by moving the call to check_seenlabel after the
5383 check for error_mark_node, and copying the code of check_seenlabel that
5384 deals with case_stack->data.case_stmt.line_number_status /
5385 restore_line_number_status in front of the call to end_cleanup_deferral;
5386 however, this might miss some useful warnings in the presence of
5387 non-syntax errors. */
5388 check_seenlabel ();
5390 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5391 if (index_type != error_mark_node)
5393 /* If the switch expression was an enumerated type, check that
5394 all enumeration literals are covered by the cases.
5395 The check is made when -Wswitch was specified and there is no
5396 default case, or when -Wswitch-enum was specified. */
5397 if (((warn_switch && !thiscase->data.case_stmt.default_label)
5398 || warn_switch_enum)
5399 && TREE_CODE (orig_type) == ENUMERAL_TYPE
5400 && TREE_CODE (index_expr) != INTEGER_CST)
5401 check_for_full_enumeration_handling (orig_type);
5403 if (warn_switch_default && !thiscase->data.case_stmt.default_label)
5404 warning ("switch missing default case");
5406 /* If we don't have a default-label, create one here,
5407 after the body of the switch. */
5408 if (thiscase->data.case_stmt.default_label == 0)
5410 thiscase->data.case_stmt.default_label
5411 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5412 /* Share the exit label if possible. */
5413 if (thiscase->exit_label)
5415 SET_DECL_RTL (thiscase->data.case_stmt.default_label,
5416 thiscase->exit_label);
5417 exit_done = true;
5419 expand_label (thiscase->data.case_stmt.default_label);
5421 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5423 before_case = get_last_insn ();
5425 if (thiscase->data.case_stmt.case_list
5426 && thiscase->data.case_stmt.case_list->left)
5427 thiscase->data.case_stmt.case_list
5428 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5430 /* Simplify the case-list before we count it. */
5431 group_case_nodes (thiscase->data.case_stmt.case_list);
5432 strip_default_case_nodes (&thiscase->data.case_stmt.case_list,
5433 default_label);
5435 /* Get upper and lower bounds of case values.
5436 Also convert all the case values to the index expr's data type. */
5438 uniq = 0;
5439 count = 0;
5440 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5442 /* Check low and high label values are integers. */
5443 if (TREE_CODE (n->low) != INTEGER_CST)
5444 abort ();
5445 if (TREE_CODE (n->high) != INTEGER_CST)
5446 abort ();
5448 n->low = convert (index_type, n->low);
5449 n->high = convert (index_type, n->high);
5451 /* Count the elements and track the largest and smallest
5452 of them (treating them as signed even if they are not). */
5453 if (count++ == 0)
5455 minval = n->low;
5456 maxval = n->high;
5458 else
5460 if (INT_CST_LT (n->low, minval))
5461 minval = n->low;
5462 if (INT_CST_LT (maxval, n->high))
5463 maxval = n->high;
5465 /* A range counts double, since it requires two compares. */
5466 if (! tree_int_cst_equal (n->low, n->high))
5467 count++;
5469 /* Count the number of unique case node targets. */
5470 uniq++;
5471 lab = label_rtx (n->code_label);
5472 for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
5473 if (same_case_target_p (label_rtx (m->code_label), lab))
5475 uniq--;
5476 break;
5480 /* Compute span of values. */
5481 if (count != 0)
5482 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5484 end_cleanup_deferral ();
5486 if (count == 0)
5488 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5489 emit_queue ();
5490 emit_jump (default_label);
5493 /* Try implementing this switch statement by a short sequence of
5494 bit-wise comparisons. However, we let the binary-tree case
5495 below handle constant index expressions. */
5496 else if (CASE_USE_BIT_TESTS
5497 && ! TREE_CONSTANT (index_expr)
5498 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
5499 && compare_tree_int (range, 0) > 0
5500 && lshift_cheap_p ()
5501 && ((uniq == 1 && count >= 3)
5502 || (uniq == 2 && count >= 5)
5503 || (uniq == 3 && count >= 6)))
5505 /* Optimize the case where all the case values fit in a
5506 word without having to subtract MINVAL; the subtraction
5507 can then be omitted entirely. */
5508 if (compare_tree_int (minval, 0) > 0
5509 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
5511 minval = integer_zero_node;
5512 range = maxval;
5514 emit_case_bit_tests (index_type, index_expr, minval, range,
5515 thiscase->data.case_stmt.case_list,
5516 default_label);
5519 /* If range of values is much bigger than number of values,
5520 make a sequence of conditional branches instead of a dispatch.
5521 If the switch-index is a constant, do it this way
5522 because we can optimize it. */

      else if (count < case_values_threshold ()
               || compare_tree_int (range,
                                    (optimize_size ? 3 : 10) * count) > 0
               /* RANGE may be signed, and really large ranges will show up
                  as negative numbers.  */
               || compare_tree_int (range, 0) < 0
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
               || flag_pic
#endif
               || TREE_CONSTANT (index_expr))
        {
          index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);

          /* If the index is a short or char for which we do not have
             an insn to handle comparisons directly, convert it to
             a full integer now, rather than letting each comparison
             generate the conversion.  */

          if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
              && ! have_insn_for (COMPARE, GET_MODE (index)))
            {
              enum machine_mode wider_mode;
              for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
                   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
                if (have_insn_for (COMPARE, wider_mode))
                  {
                    index = convert_to_mode (wider_mode, index, unsignedp);
                    break;
                  }
            }

          emit_queue ();
          do_pending_stack_adjust ();

          index = protect_from_queue (index, 0);
          if (GET_CODE (index) == MEM)
            index = copy_to_reg (index);
          if (GET_CODE (index) == CONST_INT
              || TREE_CODE (index_expr) == INTEGER_CST)
            {
              /* Make a tree node with the proper constant value
                 if we don't already have one.  */
              if (TREE_CODE (index_expr) != INTEGER_CST)
                {
                  index_expr
                    = build_int_2 (INTVAL (index),
                                   unsignedp || INTVAL (index) >= 0 ? 0 : -1);
                  index_expr = convert (index_type, index_expr);
                }

              /* For constant index expressions we need only
                 issue an unconditional branch to the appropriate
                 target code.  The job of removing any unreachable
                 code is left to the optimization phase if the
                 "-O" option is specified.  */
              for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
                if (! tree_int_cst_lt (index_expr, n->low)
                    && ! tree_int_cst_lt (n->high, index_expr))
                  break;

              if (n)
                emit_jump (label_rtx (n->code_label));
              else
                emit_jump (default_label);
            }
          else
            {
              /* If the index expression is not constant we generate
                 a binary decision tree to select the appropriate
                 target code.  This is done as follows:

                 The list of cases is rearranged into a binary tree,
                 nearly optimal assuming equal probability for each case.

                 The tree is transformed into RTL, eliminating
                 redundant test conditions at the same time.

                 If program flow could reach the end of the
                 decision tree an unconditional jump to the
                 default code is emitted.  */
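
              /* Added commentary: "eliminating redundant test
                 conditions" means, for example, that once an ancestor
                 node has already branched on index > 4 toward its right
                 subtree, a node covering the value 5 need not re-test
                 its lower bound; see node_has_low_bound and
                 node_has_high_bound below.  */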

              use_cost_table
                = (TREE_CODE (orig_type) != ENUMERAL_TYPE
                   && estimate_case_costs (thiscase->data.case_stmt.case_list));
              balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
              emit_case_nodes (index, thiscase->data.case_stmt.case_list,
                               default_label, index_type);
              emit_jump_if_reachable (default_label);
            }
        }
      else
        {
          table_label = gen_label_rtx ();
          if (! try_casesi (index_type, index_expr, minval, range,
                            table_label, default_label))
            {
              index_type = thiscase->data.case_stmt.nominal_type;

              /* Index jumptables from zero for suitable values of
                 minval to avoid a subtraction.  */
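              /* For example (added commentary): with case values 1, 2
                 and 3, MINVAL is 1, but building a table indexed 0..3
                 whose slot 0 jumps to the default label lets us index
                 with the raw value, trading one extra table entry for
                 the runtime subtraction.  */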
              if (! optimize_size
                  && compare_tree_int (minval, 0) > 0
                  && compare_tree_int (minval, 3) < 0)
                {
                  minval = integer_zero_node;
                  range = maxval;
                }

              if (! try_tablejump (index_type, index_expr, minval, range,
                                   table_label, default_label))
                abort ();
            }

          /* Get table of labels to jump to, in order of case index.  */

          ncases = tree_low_cst (range, 0) + 1;
          labelvec = alloca (ncases * sizeof (rtx));
          memset (labelvec, 0, ncases * sizeof (rtx));

          for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
            {
              /* Compute the low and high bounds relative to the minimum
                 value since that should fit in a HOST_WIDE_INT while the
                 actual values may not.  */
              HOST_WIDE_INT i_low
                = tree_low_cst (fold (build (MINUS_EXPR, index_type,
                                             n->low, minval)), 1);
              HOST_WIDE_INT i_high
                = tree_low_cst (fold (build (MINUS_EXPR, index_type,
                                             n->high, minval)), 1);
              HOST_WIDE_INT i;

              for (i = i_low; i <= i_high; i++)
                labelvec[i]
                  = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
            }

          /* Fill in the gaps with the default.  */
          for (i = 0; i < ncases; i++)
            if (labelvec[i] == 0)
              labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);

          /* Output the table.  */
          emit_label (table_label);

          if (CASE_VECTOR_PC_RELATIVE || flag_pic)
            emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
                                                   gen_rtx_LABEL_REF (Pmode, table_label),
                                                   gen_rtvec_v (ncases, labelvec),
                                                   const0_rtx, const0_rtx));
          else
            emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
                                              gen_rtvec_v (ncases, labelvec)));

          /* If the case insn drops through the table,
             after the table we must jump to the default-label.
             Otherwise record no drop-through after the table.  */
#ifdef CASE_DROPS_THROUGH
          emit_jump (default_label);
#else
          emit_barrier ();
#endif
        }

      before_case = NEXT_INSN (before_case);
      end = get_last_insn ();
      if (squeeze_notes (&before_case, &end))
        abort ();
      reorder_insns (before_case, end,
                     thiscase->data.case_stmt.start);
    }
  else
    end_cleanup_deferral ();

  if (thiscase->exit_label && !exit_done)
    emit_label (thiscase->exit_label);

  POPSTACK (case_stack);

  free_temp_slots ();
}

/* Convert the tree NODE into a list linked by the right field, with the left
   field zeroed.  RIGHT is used for recursion; it is a list to be placed
   rightmost in the resulting list.  */
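/* For instance (added commentary): the three-node tree with 2 at the
   root, 1 as its left child and 3 as its right child becomes the list
   1 -> 2 -> 3, linked through the RIGHT fields with every LEFT field
   zeroed.  */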

static struct case_node *
case_tree2list (struct case_node *node, struct case_node *right)
{
  struct case_node *left;

  if (node->right)
    right = case_tree2list (node->right, right);

  node->right = right;
  if ((left = node->left))
    {
      node->left = 0;
      return case_tree2list (left, node);
    }

  return node;
}

/* Generate code to jump to LABEL if OP1 and OP2 are equal.  */

static void
do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
{
  if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
    {
      if (op1 == op2)
        emit_jump (label);
    }
  else
    emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
                             (GET_MODE (op1) == VOIDmode
                              ? GET_MODE (op2) : GET_MODE (op1)),
                             unsignedp, label);
}

/* Not all case values are encountered equally.  This function
   uses a heuristic to weight case labels, in cases where that
   looks like a reasonable thing to do.

   Right now, all we try to guess is text, and we establish the
   following weights:

        chars above space:      16
        digits:                 16
        default:                12
        space, punct:           8
        tab:                    4
        newline:                2
        other "\" chars:        1
        remaining chars:        0

   If we find any cases in the switch that are not either -1 or in the range
   of valid ASCII characters, or are control characters other than those
   commonly used with "\", don't treat this switch as scanning text.

   Return 1 if these nodes are suitable for cost estimation, otherwise
   return 0.  */
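/* For example (added commentary): a switch over 'a', 'z', '0' and '\n'
   looks like text scanning, so its labels get weights 16, 16, 16 and 2;
   a switch containing the value 200 or a stray control character such
   as '\002' makes this function return 0 and the caller falls back to
   uniform costs.  */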

static int
estimate_case_costs (case_node_ptr node)
{
  tree min_ascii = integer_minus_one_node;
  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
  case_node_ptr n;
  int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (! cost_table_initialized)
    {
      cost_table_initialized = 1;

      for (i = 0; i < 128; i++)
        {
          if (ISALNUM (i))
            COST_TABLE (i) = 16;
          else if (ISPUNCT (i))
            COST_TABLE (i) = 8;
          else if (ISCNTRL (i))
            COST_TABLE (i) = -1;
        }

      COST_TABLE (' ') = 8;
      COST_TABLE ('\t') = 4;
      COST_TABLE ('\0') = 4;
      COST_TABLE ('\n') = 2;
      COST_TABLE ('\f') = 1;
      COST_TABLE ('\v') = 1;
      COST_TABLE ('\b') = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if (INT_CST_LT (n->low, min_ascii) || INT_CST_LT (max_ascii, n->high))
        return 0;

      for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
           i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
        if (COST_TABLE (i) < 0)
          return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}

/* Determine whether two case labels branch to the same target.  */

static bool
same_case_target_p (rtx l1, rtx l2)
{
  rtx i1, i2;

  if (l1 == l2)
    return true;

  i1 = next_real_insn (l1);
  i2 = next_real_insn (l2);
  if (i1 == i2)
    return true;

  if (i1 && simplejump_p (i1))
    {
      l1 = XEXP (SET_SRC (PATTERN (i1)), 0);
    }

  if (i2 && simplejump_p (i2))
    {
      l2 = XEXP (SET_SRC (PATTERN (i2)), 0);
    }

  return l1 == l2;
}

/* Delete nodes that branch to the default label from a list of
   case nodes.  Eg. case 5: default: becomes just default:  */

static void
strip_default_case_nodes (case_node_ptr *prev, rtx deflab)
{
  case_node_ptr ptr;

  while (*prev)
    {
      ptr = *prev;
      if (same_case_target_p (label_rtx (ptr->code_label), deflab))
        *prev = ptr->right;
      else
        prev = &ptr->right;
    }
}

/* Scan an ordered list of case nodes
   combining those with consecutive values or ranges.

   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_nodes (case_node_ptr head)
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lab = label_rtx (node->code_label);
      case_node_ptr np = node;

      /* Try to group the successors of NODE with NODE.  */
      while (((np = np->right) != 0)
             /* Do they jump to the same place?  */
             && same_case_target_p (label_rtx (np->code_label), lab)
             /* Are their ranges consecutive?  */
             && tree_int_cst_equal (np->low,
                                    fold (build (PLUS_EXPR,
                                                 TREE_TYPE (node->high),
                                                 node->high,
                                                 integer_one_node)))
             /* An overflow is not consecutive.  */
             && tree_int_cst_lt (node->high,
                                 fold (build (PLUS_EXPR,
                                              TREE_TYPE (node->high),
                                              node->high,
                                              integer_one_node))))
        {
          node->high = np->high;
        }
      /* NP is the first node after NODE which can't be grouped with it.
         Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}

/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */
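/* Worked example (added commentary): given the ordered case values
   1,2,3,4,5,6,7 with uniform costs, the pivot is 4; 1,2,3 form the
   left branch and 5,6,7 the right branch, each balanced recursively,
   so the longest chain of emitted compares grows like log2(N) rather
   than N.  */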

static void
balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
{
  case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
        {
          if (!tree_int_cst_equal (np->low, np->high))
            {
              ranges++;
              if (use_cost_table)
                cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
            }

          if (use_cost_table)
            cost += COST_TABLE (TREE_INT_CST_LOW (np->low));

          i++;
          np = np->right;
        }

      if (i > 2)
        {
          /* Split this list if it is long enough for that to help.  */
          npp = head;
          left = *npp;
          if (use_cost_table)
            {
              /* Find the place in the list that bisects the list's total
                 cost.  Here I gets half the total cost.  */
              int n_moved = 0;
              i = (cost + 1) / 2;
              while (1)
                {
                  /* Skip nodes while their cost does not reach that amount.  */
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
                    i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
                  i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
                  if (i <= 0)
                    break;
                  npp = &(*npp)->right;
                  n_moved += 1;
                }
              if (n_moved == 0)
                {
                  /* Leave this branch lopsided, but optimize left-hand
                     side and fill in `parent' fields for right-hand side.  */
                  np = *head;
                  np->parent = parent;
                  balance_case_nodes (&np->left, np);
                  for (; np->right; np = np->right)
                    np->right->parent = np;
                  return;
                }
            }
          /* If there are just three nodes, split at the middle one.  */
          else if (i == 3)
            npp = &(*npp)->right;
          else
            {
              /* Find the place in the list that bisects the list's total
                 cost, where ranges count as 2.
                 Here I gets half the total cost.  */
              i = (i + ranges + 1) / 2;
              while (1)
                {
                  /* Skip nodes while their cost does not reach that amount.  */
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
                    i--;
                  i--;
                  if (i <= 0)
                    break;
                  npp = &(*npp)->right;
                }
            }
          *head = np = *npp;
          *npp = 0;
          np->parent = parent;
          np->left = left;

          /* Optimize each of the two split parts.  */
          balance_case_nodes (&np->left, np);
          balance_case_nodes (&np->right, np);
        }
      else
        {
          /* Else leave this branch as one level,
             but fill in `parent' fields.  */
          np = *head;
          np->parent = parent;
          for (; np->right; np = np->right)
            np->right->parent = np;
        }
    }
}

/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one that the current node is bounded at its lower
   span.  Thus the test would be redundant.  */
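/* Concrete case (added commentary): if an ancestor on the path here
   already ruled out index <= 49 (its HIGH is 49), and this node's LOW
   is 50, then LOW - 1 equals the ancestor's HIGH, so control can only
   arrive with index >= 50 and the lower-bound test can be skipped.  */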

static int
node_has_low_bound (case_node_ptr node, tree index_type)
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
                               node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}

/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one that the current node is bounded at its upper
   span.  Thus the test would be redundant.  */

static int
node_has_high_bound (case_node_ptr node, tree index_type)
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If there is no upper bound, obviously no test is needed.  */

  if (TYPE_MAX_VALUE (index_type) == NULL)
    return 1;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
                               node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}

/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (case_node_ptr node, tree index_type)
{
  return (node_has_low_bound (node, index_type)
          && node_has_high_bound (node, index_type));
}

/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (rtx label)
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}

/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates' associated
   code for out-of-bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */

static void
emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
                 tree index_type)
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  enum machine_mode mode = GET_MODE (index);
  enum machine_mode imode = TYPE_MODE (index_type);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
         this node and then check our children, if any.  */

      do_jump_if_equal (index,
                        convert_modes (mode, imode,
                                       expand_expr (node->low, NULL_RTX,
                                                    VOIDmode, 0),
                                       unsignedp),
                        label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
        {
          /* This node has children on both sides.
             Dispatch to one side or the other
             by comparing the index value with this node's value.
             If one subtree is bounded, check that one first,
             so we can avoid real branches in the tree.  */

          if (node_is_bounded (node->right, index_type))
            {
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       label_rtx (node->right->code_label));
              emit_case_nodes (index, node->left, default_label, index_type);
            }

          else if (node_is_bounded (node->left, index_type))
            {
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       LT, NULL_RTX, mode, unsignedp,
                                       label_rtx (node->left->code_label));
              emit_case_nodes (index, node->right, default_label, index_type);
            }

          else
            {
              /* Neither node is bounded.  First distinguish the two sides;
                 then emit the code for one side at a time.  */

              tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

              /* See if the value is on the right.  */
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       label_rtx (test_label));

              /* Value must be on the left.
                 Handle the left-hand subtree.  */
              emit_case_nodes (index, node->left, default_label, index_type);
              /* If left-hand subtree does nothing,
                 go to default.  */
              emit_jump_if_reachable (default_label);

              /* Code branches here for the right-hand subtree.  */
              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Here we have a right child but no left, so we issue a
             conditional branch to default and process the right child.

             Omit the conditional branch to default if it would guard
             only a single-valued right child; it costs too much space
             to save so little time.  */

          if (node->right->right || node->right->left
              || !tree_int_cst_equal (node->right->low, node->right->high))
            {
              if (!node_has_low_bound (node, index_type))
                {
                  emit_cmp_and_jump_insns (index,
                                           convert_modes
                                           (mode, imode,
                                            expand_expr (node->high, NULL_RTX,
                                                         VOIDmode, 0),
                                            unsignedp),
                                           LT, NULL_RTX, mode, unsignedp,
                                           default_label);
                }

              emit_case_nodes (index, node->right, default_label, index_type);
            }
          else
            /* We cannot process node->right normally
               since we haven't ruled out the numbers less than
               this node's value.  So handle node->right explicitly.  */
            do_jump_if_equal (index,
                              convert_modes
                              (mode, imode,
                               expand_expr (node->right->low, NULL_RTX,
                                            VOIDmode, 0),
                               unsignedp),
                              label_rtx (node->right->code_label), unsignedp);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Just one subtree, on the left.  */
          if (node->left->left || node->left->right
              || !tree_int_cst_equal (node->left->low, node->left->high))
            {
              if (!node_has_high_bound (node, index_type))
                {
                  emit_cmp_and_jump_insns (index,
                                           convert_modes
                                           (mode, imode,
                                            expand_expr (node->high, NULL_RTX,
                                                         VOIDmode, 0),
                                            unsignedp),
                                           GT, NULL_RTX, mode, unsignedp,
                                           default_label);
                }

              emit_case_nodes (index, node->left, default_label, index_type);
            }
          else
            /* We cannot process node->left normally
               since we haven't ruled out the numbers greater than
               this node's value.  So handle node->left explicitly.  */
            do_jump_if_equal (index,
                              convert_modes
                              (mode, imode,
                               expand_expr (node->left->low, NULL_RTX,
                                            VOIDmode, 0),
                               unsignedp),
                              label_rtx (node->left->code_label), unsignedp);
        }
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
         value, except that we do not start by testing whether this node
         is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
        {
          /* Node has subtrees on both sides.
             If the right-hand subtree is bounded,
             test for it first, since we can go straight there.
             Otherwise, we need to make a branch in the control structure,
             then handle the two subtrees.  */
          tree test_label = 0;

          if (node_is_bounded (node->right, index_type))
            /* Right hand node is fully bounded so we can eliminate any
               testing and branch directly to the target code.  */
            emit_cmp_and_jump_insns (index,
                                     convert_modes
                                     (mode, imode,
                                      expand_expr (node->high, NULL_RTX,
                                                   VOIDmode, 0),
                                      unsignedp),
                                     GT, NULL_RTX, mode, unsignedp,
                                     label_rtx (node->right->code_label));
          else
            {
              /* Right hand node requires testing.
                 Branch to a label where we will handle it later.  */

              test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       label_rtx (test_label));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_and_jump_insns (index,
                                   convert_modes
                                   (mode, imode,
                                    expand_expr (node->low, NULL_RTX,
                                                 VOIDmode, 0),
                                    unsignedp),
                                   GE, NULL_RTX, mode, unsignedp,
                                   label_rtx (node->code_label));

          /* Handle the left-hand subtree.  */
          emit_case_nodes (index, node->left, default_label, index_type);

          /* If right node had to be handled later, do that now.  */

          if (test_label)
            {
              /* If the left-hand subtree fell through,
                 don't let it fall into the right-hand subtree.  */
              emit_jump_if_reachable (default_label);

              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Deal with values to the left of this node,
             if they are possible.  */
          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->low, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       LT, NULL_RTX, mode, unsignedp,
                                       default_label);
            }

          /* Value belongs to this node or to the right-hand subtree.  */

          emit_cmp_and_jump_insns (index,
                                   convert_modes
                                   (mode, imode,
                                    expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                                    unsignedp),
                                   LE, NULL_RTX, mode, unsignedp,
                                   label_rtx (node->code_label));

          emit_case_nodes (index, node->right, default_label, index_type);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Deal with values to the right of this node,
             if they are possible.  */
          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       default_label);
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_and_jump_insns (index,
                                   convert_modes
                                   (mode, imode,
                                    expand_expr (node->low, NULL_RTX,
                                                 VOIDmode, 0),
                                    unsignedp),
                                   GE, NULL_RTX, mode, unsignedp,
                                   label_rtx (node->code_label));

          emit_case_nodes (index, node->left, default_label, index_type);
        }

      else
        {
          /* Node has no children so we check low and high bounds to remove
             redundant tests.  Only one of the bounds can exist,
             since otherwise this node is bounded--a case tested already.  */
          int high_bound = node_has_high_bound (node, index_type);
          int low_bound = node_has_low_bound (node, index_type);

          if (!high_bound && low_bound)
            {
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       default_label);
            }

          else if (!low_bound && high_bound)
            {
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->low, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       LT, NULL_RTX, mode, unsignedp,
                                       default_label);
            }
          else if (!low_bound && !high_bound)
            {
              /* Widen LOW and HIGH to the same width as INDEX.  */
              tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
              tree low = build1 (CONVERT_EXPR, type, node->low);
              tree high = build1 (CONVERT_EXPR, type, node->high);
              rtx low_rtx, new_index, new_bound;

              /* Instead of doing two branches, emit one unsigned branch for
                 (index - low) > (high - low).  */
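              /* Worked example (added commentary): for a range 3 .. 7
                 this emits the single unsigned test (index - 3) > 4,
                 which is false exactly when 3 <= index <= 7, because
                 any index below 3 wraps around to a huge unsigned
                 value.  */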
              low_rtx = expand_expr (low, NULL_RTX, mode, 0);
              new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
                                               NULL_RTX, unsignedp,
                                               OPTAB_WIDEN);
              new_bound = expand_expr (fold (build (MINUS_EXPR, type,
                                                    high, low)),
                                       NULL_RTX, mode, 0);

              emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
                                       mode, 1, default_label);
            }

          emit_jump (label_rtx (node->code_label));
        }
    }
}

#include "gt-stmt.h"