1 /* Conditional constant propagation pass for the GNU compiler.
2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
3 Free Software Foundation, Inc.
4 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
5 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 2, or (at your option) any
12 later version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 02111-1307, USA. */
24 /* Conditional constant propagation.
26 References:
28 Constant propagation with conditional branches,
29 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
31 Building an Optimizing Compiler,
32 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
34 Advanced Compiler Design and Implementation,
35 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
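/* As a small illustrative example (hypothetical GIMPLE), given

     a_1 = 2;
     b_2 = a_1 + 3;
     if (b_2 > 4)
       ...

   the pass discovers that a_1 and b_2 are the constants 2 and 5,
   folds the predicate to true and thereby learns that only one of
   the conditional's outgoing edges is executable.  */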
37 #include "config.h"
38 #include "system.h"
39 #include "coretypes.h"
40 #include "tm.h"
41 #include "tree.h"
42 #include "flags.h"
43 #include "rtl.h"
44 #include "tm_p.h"
45 #include "ggc.h"
46 #include "basic-block.h"
47 #include "output.h"
48 #include "errors.h"
49 #include "expr.h"
50 #include "function.h"
51 #include "diagnostic.h"
52 #include "timevar.h"
53 #include "tree-dump.h"
54 #include "tree-flow.h"
55 #include "tree-pass.h"
56 #include "tree-ssa-propagate.h"
57 #include "langhooks.h"
60 /* Possible lattice values. */
61 typedef enum
63 UNINITIALIZED = 0,
64 UNDEFINED,
65 UNKNOWN_VAL,
66 CONSTANT,
67 VARYING
68 } latticevalue;
70 /* Main structure for CCP. Contains the lattice value and, if it's a
71 constant, the constant value. */
72 typedef struct
74 latticevalue lattice_val;
75 tree const_val;
76 } value;
78 /* This is used to track the current value of each variable. */
79 static value *value_vector;
82 /* Dump lattice value VAL to file OUTF prefixed by PREFIX. */
84 static void
85 dump_lattice_value (FILE *outf, const char *prefix, value val)
87 switch (val.lattice_val)
89 case UNDEFINED:
90 fprintf (outf, "%sUNDEFINED", prefix);
91 break;
92 case VARYING:
93 fprintf (outf, "%sVARYING", prefix);
94 break;
95 case UNKNOWN_VAL:
96 fprintf (outf, "%sUNKNOWN_VAL", prefix);
97 break;
98 case CONSTANT:
99 fprintf (outf, "%sCONSTANT ", prefix);
100 print_generic_expr (outf, val.const_val, dump_flags);
101 break;
102 default:
103 gcc_unreachable ();
108 /* Return a default value for variable VAR using the following rules:
110 1- Function arguments are considered VARYING.
112 2- Global and static variables that are declared constant are
113 considered CONSTANT.
115 3- Any other virtually defined variable is considered UNKNOWN_VAL.
117 4- Any other value is considered UNDEFINED. This is useful when
118 considering PHI nodes. PHI arguments that are undefined do not
119 change the constant value of the PHI node, which allows for more
120 constants to be propagated. */
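/* For instance (hypothetical), given x_3 = PHI <x_1, 4> where x_1 is
   never assigned, defaulting x_1 to UNDEFINED lets the PHI evaluate
   to the constant 4 instead of pessimistically becoming VARYING.  */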
122 static value
123 get_default_value (tree var)
125 value val;
126 tree sym;
128 if (TREE_CODE (var) == SSA_NAME)
129 sym = SSA_NAME_VAR (var);
130 else
132 gcc_assert (DECL_P (var));
133 sym = var;
136 val.lattice_val = UNDEFINED;
137 val.const_val = NULL_TREE;
139 if (TREE_CODE (var) == SSA_NAME
140 && SSA_NAME_VALUE (var)
141 && is_gimple_min_invariant (SSA_NAME_VALUE (var)))
143 val.lattice_val = CONSTANT;
144 val.const_val = SSA_NAME_VALUE (var);
146 else if (TREE_CODE (sym) == PARM_DECL || TREE_THIS_VOLATILE (sym))
148 /* Function arguments and volatile variables are considered VARYING. */
149 val.lattice_val = VARYING;
151 else if (TREE_STATIC (sym))
153 /* Globals and static variables are considered UNKNOWN_VAL,
154 unless they are declared 'const'. */
155 if (TREE_READONLY (sym)
156 && DECL_INITIAL (sym)
157 && is_gimple_min_invariant (DECL_INITIAL (sym)))
159 val.lattice_val = CONSTANT;
160 val.const_val = DECL_INITIAL (sym);
162 else
164 val.const_val = NULL_TREE;
165 val.lattice_val = UNKNOWN_VAL;
168 else if (!is_gimple_reg (sym))
170 val.const_val = NULL_TREE;
171 val.lattice_val = UNKNOWN_VAL;
173 else
175 enum tree_code code;
176 tree stmt = SSA_NAME_DEF_STMT (var);
178 if (!IS_EMPTY_STMT (stmt))
180 code = TREE_CODE (stmt);
181 if (code != MODIFY_EXPR && code != PHI_NODE)
182 val.lattice_val = VARYING;
186 return val;
189 /* Get the constant value associated with variable VAR. */
191 static value *
192 get_value (tree var)
194 value *val;
196 gcc_assert (TREE_CODE (var) == SSA_NAME);
198 val = &value_vector[SSA_NAME_VERSION (var)];
199 if (val->lattice_val == UNINITIALIZED)
200 *val = get_default_value (var);
202 return val;
206 /* Set the lattice value for variable VAR to VAL. Return true if VAL
207 is different from VAR's previous value. */
209 static bool
210 set_lattice_value (tree var, value val)
212 value *old = get_value (var);
214 if (val.lattice_val == UNDEFINED)
216 /* CONSTANT->UNDEFINED is never a valid state transition. */
217 gcc_assert (old->lattice_val != CONSTANT);
219 /* UNKNOWN_VAL->UNDEFINED is never a valid state transition. */
220 gcc_assert (old->lattice_val != UNKNOWN_VAL);
222 /* VARYING->UNDEFINED is generally not a valid state transition,
223 except for values which are initialized to VARYING. */
224 gcc_assert (old->lattice_val != VARYING
225 || get_default_value (var).lattice_val == VARYING);
227 else if (val.lattice_val == CONSTANT)
228 /* VARYING -> CONSTANT is an invalid state transition, except
229 for objects which start off in a VARYING state. */
230 gcc_assert (old->lattice_val != VARYING
231 || get_default_value (var).lattice_val == VARYING);
233 /* If the constant for VAR has changed, then this VAR is really varying. */
234 if (old->lattice_val == CONSTANT
235 && val.lattice_val == CONSTANT
236 && !simple_cst_equal (old->const_val, val.const_val))
238 val.lattice_val = VARYING;
239 val.const_val = NULL_TREE;
242 if (old->lattice_val != val.lattice_val)
244 if (dump_file && (dump_flags & TDF_DETAILS))
246 dump_lattice_value (dump_file, "Lattice value changed to ", val);
247 fprintf (dump_file, ". Adding definition to SSA edges.\n");
250 *old = val;
251 return true;
254 return false;
258 /* Set the lattice value for the variable VAR to VARYING. */
260 static void
261 def_to_varying (tree var)
263 value val;
264 val.lattice_val = VARYING;
265 val.const_val = NULL_TREE;
266 set_lattice_value (var, val);
270 /* Return the likely latticevalue for STMT.
272 If STMT has no operands, then return CONSTANT.
274 Else if any operands of STMT are undefined, then return UNDEFINED.
276 Else if any operands of STMT are constants, then return CONSTANT.
278 Else return VARYING. */
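/* For example (hypothetical operands): x_1 = 42 has no operands and
   is likely CONSTANT; x_2 = y_1 + 1 is likely UNDEFINED while y_1 is
   UNDEFINED, likely CONSTANT once y_1 is CONSTANT, and VARYING
   otherwise.  */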
280 static latticevalue
281 likely_value (tree stmt)
283 vuse_optype vuses;
284 int found_constant = 0;
285 stmt_ann_t ann;
286 tree use;
287 ssa_op_iter iter;
289 /* If the statement makes aliased loads or has volatile operands, it
290 won't fold to a constant value. */
291 ann = stmt_ann (stmt);
292 if (ann->makes_aliased_loads || ann->has_volatile_ops)
293 return VARYING;
295 /* A CALL_EXPR is assumed to be varying. This may be overly conservative,
296 in the presence of const and pure calls. */
297 if (get_call_expr_in (stmt) != NULL_TREE)
298 return VARYING;
300 get_stmt_operands (stmt);
302 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
304 value *val = get_value (use);
306 if (val->lattice_val == UNDEFINED)
307 return UNDEFINED;
309 if (val->lattice_val == CONSTANT)
310 found_constant = 1;
313 vuses = VUSE_OPS (ann);
315 if (NUM_VUSES (vuses))
317 tree vuse = VUSE_OP (vuses, 0);
318 value *val = get_value (vuse);
320 if (val->lattice_val == UNKNOWN_VAL)
321 return UNKNOWN_VAL;
323 /* There should be no VUSE operands that are UNDEFINED. */
324 gcc_assert (val->lattice_val != UNDEFINED);
326 if (val->lattice_val == CONSTANT)
327 found_constant = 1;
330 return ((found_constant || (!USE_OPS (ann) && !vuses)) ? CONSTANT : VARYING);
334 /* Function indicating whether we ought to include information for VAR
335 when calculating immediate uses. */
337 static bool
338 need_imm_uses_for (tree var)
340 return get_value (var)->lattice_val != VARYING;
344 /* Initialize local data structures for CCP. */
346 static void
347 ccp_initialize (void)
349 basic_block bb;
350 sbitmap is_may_def;
352 value_vector = (value *) xmalloc (num_ssa_names * sizeof (value));
353 memset (value_vector, 0, num_ssa_names * sizeof (value));
355 /* Set of SSA_NAMEs that are defined by a V_MAY_DEF. */
356 is_may_def = sbitmap_alloc (num_ssa_names);
357 sbitmap_zero (is_may_def);
359 /* Initialize simulation flags for PHI nodes and statements. */
360 FOR_EACH_BB (bb)
362 block_stmt_iterator i;
364 /* Mark all V_MAY_DEF operands VARYING. */
365 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
367 bool is_varying = false;
368 tree stmt = bsi_stmt (i);
369 ssa_op_iter iter;
370 tree def;
372 get_stmt_operands (stmt);
374 /* Get the default value for each DEF and V_MUST_DEF. */
375 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter,
376 (SSA_OP_DEF | SSA_OP_VMUSTDEF))
378 if (get_value (def)->lattice_val == VARYING)
379 is_varying = true;
382 /* Mark all V_MAY_DEF operands VARYING. */
383 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_VMAYDEF)
385 get_value (def)->lattice_val = VARYING;
386 SET_BIT (is_may_def, SSA_NAME_VERSION (def));
389 /* Statements other than MODIFY_EXPR, COND_EXPR and
390 SWITCH_EXPR are not interesting for constant propagation.
391 Mark them VARYING. */
392 if (TREE_CODE (stmt) != MODIFY_EXPR
393 && TREE_CODE (stmt) != COND_EXPR
394 && TREE_CODE (stmt) != SWITCH_EXPR)
395 is_varying = true;
397 DONT_SIMULATE_AGAIN (stmt) = is_varying;
401 /* Now process PHI nodes. */
402 FOR_EACH_BB (bb)
404 tree phi, var;
405 int x;
407 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
409 value *val = get_value (PHI_RESULT (phi));
411 for (x = 0; x < PHI_NUM_ARGS (phi); x++)
413 var = PHI_ARG_DEF (phi, x);
415 /* If one argument has a V_MAY_DEF, the result is
416 VARYING. */
417 if (TREE_CODE (var) == SSA_NAME)
419 if (TEST_BIT (is_may_def, SSA_NAME_VERSION (var)))
421 val->lattice_val = VARYING;
422 SET_BIT (is_may_def, SSA_NAME_VERSION (PHI_RESULT (phi)));
423 break;
428 DONT_SIMULATE_AGAIN (phi) = (val->lattice_val == VARYING);
432 sbitmap_free (is_may_def);
434 /* Compute immediate uses for variables we care about. */
435 compute_immediate_uses (TDFA_USE_OPS | TDFA_USE_VOPS, need_imm_uses_for);
439 /* Replace USE references in statement STMT with their immediate reaching
440 definition. Return true if at least one reference was replaced. If
441 REPLACED_ADDRESSES_P is given, it will be set to true if an address
442 constant was replaced. */
444 static bool
445 replace_uses_in (tree stmt, bool *replaced_addresses_p)
447 bool replaced = false;
448 use_operand_p use;
449 ssa_op_iter iter;
451 if (replaced_addresses_p)
452 *replaced_addresses_p = false;
454 get_stmt_operands (stmt);
456 FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
458 tree tuse = USE_FROM_PTR (use);
459 value *val = get_value (tuse);
461 if (val->lattice_val != CONSTANT)
462 continue;
464 if (TREE_CODE (stmt) == ASM_EXPR
465 && !may_propagate_copy_into_asm (tuse))
466 continue;
468 SET_USE (use, val->const_val);
470 replaced = true;
471 if (POINTER_TYPE_P (TREE_TYPE (tuse)) && replaced_addresses_p)
472 *replaced_addresses_p = true;
475 return replaced;
479 /* Replace the VUSE references in statement STMT with its immediate reaching
480 definition. Return true if the reference was replaced. If
481 REPLACED_ADDRESSES_P is given, it will be set to true if an address
482 constant was replaced. */
484 static bool
485 replace_vuse_in (tree stmt, bool *replaced_addresses_p)
487 bool replaced = false;
488 vuse_optype vuses;
489 use_operand_p vuse;
490 value *val;
492 if (replaced_addresses_p)
493 *replaced_addresses_p = false;
495 get_stmt_operands (stmt);
497 vuses = STMT_VUSE_OPS (stmt);
499 if (NUM_VUSES (vuses) != 1)
500 return false;
502 vuse = VUSE_OP_PTR (vuses, 0);
503 val = get_value (USE_FROM_PTR (vuse));
505 if (val->lattice_val == CONSTANT
506 && TREE_CODE (stmt) == MODIFY_EXPR
507 && DECL_P (TREE_OPERAND (stmt, 1))
508 && TREE_OPERAND (stmt, 1) == SSA_NAME_VAR (USE_FROM_PTR (vuse)))
510 TREE_OPERAND (stmt, 1) = val->const_val;
511 replaced = true;
512 if (POINTER_TYPE_P (TREE_TYPE (USE_FROM_PTR (vuse)))
513 && replaced_addresses_p)
514 *replaced_addresses_p = true;
517 return replaced;
521 /* Perform final substitution and folding. After this pass the program
522 should still be in SSA form. */
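/* E.g. (hypothetical), a use of x_1 whose lattice value is CONSTANT 4
   is rewritten to the literal 4, after which an RHS such as 4 + 1
   folds to 5.  */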
524 static void
525 substitute_and_fold (void)
527 basic_block bb;
528 unsigned int i;
530 if (dump_file && (dump_flags & TDF_DETAILS))
531 fprintf (dump_file,
532 "\nSubstituing constants and folding statements\n\n");
534 /* Substitute constants in every statement of every basic block. */
535 FOR_EACH_BB (bb)
537 block_stmt_iterator i;
538 tree phi;
540 /* Propagate our known constants into PHI nodes. */
541 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
543 int i;
545 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
547 value *new_val;
548 use_operand_p orig_p = PHI_ARG_DEF_PTR (phi, i);
549 tree orig = USE_FROM_PTR (orig_p);
551 if (! SSA_VAR_P (orig))
552 break;
554 new_val = get_value (orig);
555 if (new_val->lattice_val == CONSTANT
556 && may_propagate_copy (orig, new_val->const_val))
557 SET_USE (orig_p, new_val->const_val);
561 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
563 bool replaced_address;
564 tree stmt = bsi_stmt (i);
566 /* Skip statements that have been folded already. */
567 if (stmt_modified_p (stmt) || !is_exec_stmt (stmt))
568 continue;
570 /* Replace the statement with its folded version and mark it
571 folded. */
572 if (dump_file && (dump_flags & TDF_DETAILS))
574 fprintf (dump_file, "Line %d: replaced ", get_lineno (stmt));
575 print_generic_stmt (dump_file, stmt, TDF_SLIM);
578 if (replace_uses_in (stmt, &replaced_address)
579 || replace_vuse_in (stmt, &replaced_address))
581 bool changed = fold_stmt (bsi_stmt_ptr (i));
582 stmt = bsi_stmt(i);
584 /* If we folded a builtin function, we'll likely
585 need to rename VDEFs. */
586 if (replaced_address || changed)
587 mark_new_vars_to_rename (stmt, vars_to_rename);
589 /* If we cleaned up EH information from the statement,
590 remove EH edges. */
591 if (maybe_clean_eh_stmt (stmt))
592 tree_purge_dead_eh_edges (bb);
594 modify_stmt (stmt);
597 if (dump_file && (dump_flags & TDF_DETAILS))
599 fprintf (dump_file, " with ");
600 print_generic_stmt (dump_file, stmt, TDF_SLIM);
601 fprintf (dump_file, "\n");
606 /* And transfer what we learned from VALUE_VECTOR into the
607 SSA_NAMEs themselves. This probably isn't terribly important
608 since we probably constant propagated the values to their
609 use sites above. */
610 for (i = 0; i < num_ssa_names; i++)
612 tree name = ssa_name (i);
613 value *value;
615 if (!name)
616 continue;
618 value = get_value (name);
619 if (value->lattice_val == CONSTANT
620 && is_gimple_reg (name)
621 && is_gimple_min_invariant (value->const_val))
622 SSA_NAME_VALUE (name) = value->const_val;
627 /* Free allocated storage. */
629 static void
630 ccp_finalize (void)
632 /* Perform substitutions based on the known constant values. */
633 substitute_and_fold ();
635 free (value_vector);
640 /* Compute the meet operator between VAL1 and VAL2:
642 any M UNDEFINED = any
643 any M VARYING = VARYING
644 any M UNKNOWN_VAL = UNKNOWN_VAL
645 Ci M Cj = Ci if (i == j)
646 Ci M Cj = VARYING if (i != j) */
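/* Note the asymmetry between the optimistic UNDEFINED and the
   pessimistic UNKNOWN_VAL: UNDEFINED M CONSTANT 4 = CONSTANT 4,
   whereas UNKNOWN_VAL M CONSTANT 4 = UNKNOWN_VAL.  */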
647 static value
648 ccp_lattice_meet (value val1, value val2)
650 value result;
652 /* any M UNDEFINED = any. */
653 if (val1.lattice_val == UNDEFINED)
654 return val2;
655 else if (val2.lattice_val == UNDEFINED)
656 return val1;
658 /* any M VARYING = VARYING. */
659 if (val1.lattice_val == VARYING || val2.lattice_val == VARYING)
661 result.lattice_val = VARYING;
662 result.const_val = NULL_TREE;
663 return result;
666 /* any M UNKNOWN_VAL = UNKNOWN_VAL. */
667 if (val1.lattice_val == UNKNOWN_VAL
668 || val2.lattice_val == UNKNOWN_VAL)
670 result.lattice_val = UNKNOWN_VAL;
671 result.const_val = NULL_TREE;
672 return result;
675 /* Ci M Cj = Ci if (i == j)
676 Ci M Cj = VARYING if (i != j) */
677 if (simple_cst_equal (val1.const_val, val2.const_val) == 1)
679 result.lattice_val = CONSTANT;
680 result.const_val = val1.const_val;
682 else
684 result.lattice_val = VARYING;
685 result.const_val = NULL_TREE;
688 return result;
692 /* Loop through the PHI_NODE's parameters for BLOCK and compare their
693 lattice values to determine PHI_NODE's lattice value. The value of a
 694    PHI node is determined by calling ccp_lattice_meet() with all the arguments
695 of the PHI node that are incoming via executable edges. */
697 static enum ssa_prop_result
698 ccp_visit_phi_node (tree phi)
700 value new_val, *old_val;
701 int i;
703 if (dump_file && (dump_flags & TDF_DETAILS))
705 fprintf (dump_file, "\nVisiting PHI node: ");
706 print_generic_expr (dump_file, phi, dump_flags);
709 old_val = get_value (PHI_RESULT (phi));
710 switch (old_val->lattice_val)
712 case VARYING:
713 return SSA_PROP_NOT_INTERESTING;
715 case CONSTANT:
716 new_val = *old_val;
717 break;
719 case UNKNOWN_VAL:
720 /* To avoid the default value of UNKNOWN_VAL overriding
721 that of its possible constant arguments, temporarily
722 set the PHI node's default lattice value to be
723 UNDEFINED. If the PHI node's old value was UNKNOWN_VAL and
724 the new value is UNDEFINED, then we prevent the invalid
725 transition by not calling set_lattice_value. */
726 new_val.lattice_val = UNDEFINED;
727 new_val.const_val = NULL_TREE;
728 break;
730 case UNDEFINED:
731 case UNINITIALIZED:
732 new_val.lattice_val = UNDEFINED;
733 new_val.const_val = NULL_TREE;
734 break;
736 default:
737 gcc_unreachable ();
740 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
742 /* Compute the meet operator over all the PHI arguments. */
743 edge e = PHI_ARG_EDGE (phi, i);
745 if (dump_file && (dump_flags & TDF_DETAILS))
747 fprintf (dump_file,
748 "\n Argument #%d (%d -> %d %sexecutable)\n",
749 i, e->src->index, e->dest->index,
750 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
 753       /* If the incoming edge is executable, compute the meet operator for
754 the existing value of the PHI node and the current PHI argument. */
755 if (e->flags & EDGE_EXECUTABLE)
757 tree rdef = PHI_ARG_DEF (phi, i);
758 value *rdef_val, val;
760 if (is_gimple_min_invariant (rdef))
762 val.lattice_val = CONSTANT;
763 val.const_val = rdef;
764 rdef_val = &val;
766 else
767 rdef_val = get_value (rdef);
769 new_val = ccp_lattice_meet (new_val, *rdef_val);
771 if (dump_file && (dump_flags & TDF_DETAILS))
773 fprintf (dump_file, "\t");
774 print_generic_expr (dump_file, rdef, dump_flags);
775 dump_lattice_value (dump_file, "\tValue: ", *rdef_val);
776 fprintf (dump_file, "\n");
779 if (new_val.lattice_val == VARYING)
780 break;
784 if (dump_file && (dump_flags & TDF_DETAILS))
786 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
787 fprintf (dump_file, "\n\n");
790 /* Check for an invalid change from UNKNOWN_VAL to UNDEFINED. */
791 if (old_val->lattice_val == UNKNOWN_VAL
792 && new_val.lattice_val == UNDEFINED)
793 return SSA_PROP_NOT_INTERESTING;
795 /* Otherwise, make the transition to the new value. */
796 if (set_lattice_value (PHI_RESULT (phi), new_val))
798 if (new_val.lattice_val == VARYING)
799 return SSA_PROP_VARYING;
800 else
801 return SSA_PROP_INTERESTING;
803 else
804 return SSA_PROP_NOT_INTERESTING;
808 /* CCP specific front-end to the non-destructive constant folding
809 routines.
811 Attempt to simplify the RHS of STMT knowing that one or more
812 operands are constants.
814 If simplification is possible, return the simplified RHS,
815 otherwise return the original RHS. */
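/* For example (hypothetical), if the RHS of STMT is y_1 + 3 and the
   lattice records y_1 as CONSTANT 4, the simplified RHS is 7.  */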
817 static tree
818 ccp_fold (tree stmt)
820 tree rhs = get_rhs (stmt);
821 enum tree_code code = TREE_CODE (rhs);
822 enum tree_code_class kind = TREE_CODE_CLASS (code);
823 tree retval = NULL_TREE;
824 vuse_optype vuses;
826 vuses = STMT_VUSE_OPS (stmt);
828 /* If the RHS is just a variable, then that variable must now have
829 a constant value that we can return directly. */
830 if (TREE_CODE (rhs) == SSA_NAME)
831 return get_value (rhs)->const_val;
832 else if (DECL_P (rhs)
833 && NUM_VUSES (vuses) == 1
834 && rhs == SSA_NAME_VAR (VUSE_OP (vuses, 0)))
835 return get_value (VUSE_OP (vuses, 0))->const_val;
837 /* Unary operators. Note that we know the single operand must
838 be a constant. So this should almost always return a
839 simplified RHS. */
840 if (kind == tcc_unary)
842 /* Handle unary operators which can appear in GIMPLE form. */
843 tree op0 = TREE_OPERAND (rhs, 0);
845 /* Simplify the operand down to a constant. */
846 if (TREE_CODE (op0) == SSA_NAME)
848 value *val = get_value (op0);
849 if (val->lattice_val == CONSTANT)
850 op0 = get_value (op0)->const_val;
853 retval = fold_unary_to_constant (code, TREE_TYPE (rhs), op0);
 855       /* If we folded, but did not create an invariant, then we cannot
856 use this expression. */
857 if (retval && ! is_gimple_min_invariant (retval))
858 return NULL;
860 /* If we could not fold the expression, but the arguments are all
861 constants and gimple values, then build and return the new
862 expression.
864 In some cases the new expression is still something we can
865 use as a replacement for an argument. This happens with
866 NOP conversions of types for example.
 868          In other cases the new expression cannot be used as a
869 replacement for an argument (as it would create non-gimple
870 code). But the new expression can still be used to derive
871 other constants. */
872 if (! retval && is_gimple_min_invariant (op0))
873 return build1 (code, TREE_TYPE (rhs), op0);
876 /* Binary and comparison operators. We know one or both of the
877 operands are constants. */
878 else if (kind == tcc_binary
879 || kind == tcc_comparison
880 || code == TRUTH_AND_EXPR
881 || code == TRUTH_OR_EXPR
882 || code == TRUTH_XOR_EXPR)
884 /* Handle binary and comparison operators that can appear in
885 GIMPLE form. */
886 tree op0 = TREE_OPERAND (rhs, 0);
887 tree op1 = TREE_OPERAND (rhs, 1);
889 /* Simplify the operands down to constants when appropriate. */
890 if (TREE_CODE (op0) == SSA_NAME)
892 value *val = get_value (op0);
893 if (val->lattice_val == CONSTANT)
894 op0 = val->const_val;
897 if (TREE_CODE (op1) == SSA_NAME)
899 value *val = get_value (op1);
900 if (val->lattice_val == CONSTANT)
901 op1 = val->const_val;
904 retval = fold_binary_to_constant (code, TREE_TYPE (rhs), op0, op1);
 906       /* If we folded, but did not create an invariant, then we cannot
907 use this expression. */
908 if (retval && ! is_gimple_min_invariant (retval))
909 return NULL;
911 /* If we could not fold the expression, but the arguments are all
912 constants and gimple values, then build and return the new
913 expression.
915 In some cases the new expression is still something we can
916 use as a replacement for an argument. This happens with
917 NOP conversions of types for example.
 919          In other cases the new expression cannot be used as a
920 replacement for an argument (as it would create non-gimple
921 code). But the new expression can still be used to derive
922 other constants. */
923 if (! retval
924 && is_gimple_min_invariant (op0)
925 && is_gimple_min_invariant (op1))
926 return build (code, TREE_TYPE (rhs), op0, op1);
929 /* We may be able to fold away calls to builtin functions if their
930 arguments are constants. */
931 else if (code == CALL_EXPR
932 && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR
933 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0))
934 == FUNCTION_DECL)
935 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)))
937 use_optype uses = STMT_USE_OPS (stmt);
938 if (NUM_USES (uses) != 0)
940 tree *orig;
941 tree fndecl, arglist;
942 size_t i;
944 /* Preserve the original values of every operand. */
945 orig = xmalloc (sizeof (tree) * NUM_USES (uses));
946 for (i = 0; i < NUM_USES (uses); i++)
947 orig[i] = USE_OP (uses, i);
949 /* Substitute operands with their values and try to fold. */
950 replace_uses_in (stmt, NULL);
951 fndecl = get_callee_fndecl (rhs);
952 arglist = TREE_OPERAND (rhs, 1);
953 retval = fold_builtin (fndecl, arglist, false);
955 /* Restore operands to their original form. */
956 for (i = 0; i < NUM_USES (uses); i++)
957 SET_USE_OP (uses, i, orig[i]);
958 free (orig);
961 else
962 return rhs;
964 /* If we got a simplified form, see if we need to convert its type. */
965 if (retval)
966 return fold_convert (TREE_TYPE (rhs), retval);
968 /* No simplification was possible. */
969 return rhs;
973 /* Evaluate statement STMT. */
975 static value
976 evaluate_stmt (tree stmt)
978 value val;
979 tree simplified;
980 latticevalue likelyvalue = likely_value (stmt);
982 /* If the statement is likely to have a CONSTANT result, then try
983 to fold the statement to determine the constant value. */
984 if (likelyvalue == CONSTANT)
985 simplified = ccp_fold (stmt);
986 /* If the statement is likely to have a VARYING result, then do not
987 bother folding the statement. */
988 else if (likelyvalue == VARYING)
989 simplified = get_rhs (stmt);
990 /* Otherwise the statement is likely to have an UNDEFINED value and
991 there will be nothing to do. */
992 else
993 simplified = NULL_TREE;
995 if (simplified && is_gimple_min_invariant (simplified))
997 /* The statement produced a constant value. */
998 val.lattice_val = CONSTANT;
999 val.const_val = simplified;
1001 else
1003 /* The statement produced a nonconstant value. If the statement
1004 had undefined or virtual operands, then the result of the
1005 statement should be undefined or virtual respectively.
1006 Else the result of the statement is VARYING. */
1007 val.lattice_val = (likelyvalue == UNDEFINED ? UNDEFINED : VARYING);
1008 val.lattice_val = (likelyvalue == UNKNOWN_VAL
1009 ? UNKNOWN_VAL : val.lattice_val);
1010 val.const_val = NULL_TREE;
1013 return val;
1017 /* Visit the assignment statement STMT. Set the value of its LHS to the
1018 value computed by the RHS and store LHS in *OUTPUT_P. */
1020 static enum ssa_prop_result
1021 visit_assignment (tree stmt, tree *output_p)
1023 value val;
1024 tree lhs, rhs;
1025 vuse_optype vuses;
1026 v_must_def_optype v_must_defs;
1028 lhs = TREE_OPERAND (stmt, 0);
1029 rhs = TREE_OPERAND (stmt, 1);
1030 vuses = STMT_VUSE_OPS (stmt);
1031 v_must_defs = STMT_V_MUST_DEF_OPS (stmt);
1033 gcc_assert (NUM_V_MAY_DEFS (STMT_V_MAY_DEF_OPS (stmt)) == 0);
1034 gcc_assert (NUM_V_MUST_DEFS (v_must_defs) == 1
1035 || TREE_CODE (lhs) == SSA_NAME);
1037 /* We require the SSA version number of the lhs for the value_vector.
1038 Make sure we have it. */
1039 if (TREE_CODE (lhs) != SSA_NAME)
1041 /* If we make it here, then stmt only has one definition:
1042 a V_MUST_DEF. */
1043 lhs = V_MUST_DEF_RESULT (v_must_defs, 0);
1046 if (TREE_CODE (rhs) == SSA_NAME)
1048 /* For a simple copy operation, we copy the lattice values. */
1049 value *nval = get_value (rhs);
1050 val = *nval;
1052 else if (DECL_P (rhs)
1053 && NUM_VUSES (vuses) == 1
1054 && rhs == SSA_NAME_VAR (VUSE_OP (vuses, 0)))
1056 /* Same as above, but the rhs is not a gimple register and yet
1057 has a known VUSE. */
1058 value *nval = get_value (VUSE_OP (vuses, 0));
1059 val = *nval;
1061 else
1062 /* Evaluate the statement. */
1063 val = evaluate_stmt (stmt);
1065 /* If the original LHS was a VIEW_CONVERT_EXPR, modify the constant
1066 value to be a VIEW_CONVERT_EXPR of the old constant value.
1068 ??? Also, if this was a definition of a bitfield, we need to widen
1069 the constant value into the type of the destination variable. This
1070 should not be necessary if GCC represented bitfields properly. */
1072 tree orig_lhs = TREE_OPERAND (stmt, 0);
1074 if (TREE_CODE (orig_lhs) == VIEW_CONVERT_EXPR
1075 && val.lattice_val == CONSTANT)
1077 tree w = fold (build1 (VIEW_CONVERT_EXPR,
1078 TREE_TYPE (TREE_OPERAND (orig_lhs, 0)),
1079 val.const_val));
1081 orig_lhs = TREE_OPERAND (orig_lhs, 1);
1082 if (w && is_gimple_min_invariant (w))
1083 val.const_val = w;
1084 else
1086 val.lattice_val = VARYING;
1087 val.const_val = NULL;
1091 if (val.lattice_val == CONSTANT
1092 && TREE_CODE (orig_lhs) == COMPONENT_REF
1093 && DECL_BIT_FIELD (TREE_OPERAND (orig_lhs, 1)))
1095 tree w = widen_bitfield (val.const_val, TREE_OPERAND (orig_lhs, 1),
1096 orig_lhs);
1098 if (w && is_gimple_min_invariant (w))
1099 val.const_val = w;
1100 else
1102 val.lattice_val = VARYING;
1103 val.const_val = NULL;
1108 /* If LHS is not a gimple register, then it cannot take on an
1109 UNDEFINED value. */
1110 if (!is_gimple_reg (SSA_NAME_VAR (lhs))
1111 && val.lattice_val == UNDEFINED)
1112 val.lattice_val = UNKNOWN_VAL;
1114 /* Set the lattice value of the statement's output. */
1115 if (set_lattice_value (lhs, val))
1117 *output_p = lhs;
1118 if (val.lattice_val == VARYING)
1119 return SSA_PROP_VARYING;
1120 else
1121 return SSA_PROP_INTERESTING;
1123 else
1124 return SSA_PROP_NOT_INTERESTING;
1128 /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
1129 if it can determine which edge will be taken. Otherwise, return
1130 SSA_PROP_VARYING. */
1132 static enum ssa_prop_result
1133 visit_cond_stmt (tree stmt, edge *taken_edge_p)
1135 value val;
1136 basic_block block;
1138 block = bb_for_stmt (stmt);
1139 val = evaluate_stmt (stmt);
1141 /* Find which edge out of the conditional block will be taken and add it
1142 to the worklist. If no single edge can be determined statically,
1143 return SSA_PROP_VARYING to feed all the outgoing edges to the
1144 propagation engine. */
1145 *taken_edge_p = val.const_val ? find_taken_edge (block, val.const_val) : 0;
1146 if (*taken_edge_p)
1147 return SSA_PROP_INTERESTING;
1148 else
1149 return SSA_PROP_VARYING;
1153 /* Evaluate statement STMT. If the statement produces an output value and
1154 its evaluation changes the lattice value of its output, return
1155 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
1156 output value.
1158 If STMT is a conditional branch and we can determine its truth
1159 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
1160 value, return SSA_PROP_VARYING. */
1162 static enum ssa_prop_result
1163 ccp_visit_stmt (tree stmt, edge *taken_edge_p, tree *output_p)
1165 stmt_ann_t ann;
1166 v_may_def_optype v_may_defs;
1167 v_must_def_optype v_must_defs;
1168 tree def;
1169 ssa_op_iter iter;
1171 if (dump_file && (dump_flags & TDF_DETAILS))
1173 fprintf (dump_file, "\nVisiting statement: ");
1174 print_generic_stmt (dump_file, stmt, TDF_SLIM);
1175 fprintf (dump_file, "\n");
1178 ann = stmt_ann (stmt);
1180 v_must_defs = V_MUST_DEF_OPS (ann);
1181 v_may_defs = V_MAY_DEF_OPS (ann);
1182 if (TREE_CODE (stmt) == MODIFY_EXPR
1183 && NUM_V_MAY_DEFS (v_may_defs) == 0
1184 && (NUM_V_MUST_DEFS (v_must_defs) == 1
1185 || TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME))
1187 /* If the statement is an assignment that produces a single
1188 output value, evaluate its RHS to see if the lattice value of
1189 its output has changed. */
1190 return visit_assignment (stmt, output_p);
1192 else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR)
1194 /* If STMT is a conditional branch, see if we can determine
1195 which branch will be taken. */
1196 return visit_cond_stmt (stmt, taken_edge_p);
1199 /* Any other kind of statement is not interesting for constant
1200 propagation and, therefore, not worth simulating. */
1201 if (dump_file && (dump_flags & TDF_DETAILS))
1202 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
1204 /* Definitions made by statements other than assignments to
1205 SSA_NAMEs represent unknown modifications to their outputs.
1206 Mark them VARYING. */
1207 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
1208 def_to_varying (def);
1210 /* Mark all V_MAY_DEF operands VARYING. */
1211 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_VMAYDEF)
1212 def_to_varying (def);
1214 return SSA_PROP_VARYING;
1218 /* Main entry point for SSA Conditional Constant Propagation.
 1220    The pass initializes a lattice value for every SSA name (ccp_initialize), simulates the function with the generic SSA propagation engine to discover constants and executable edges (ssa_propagate), and finally substitutes the discovered constants into the IL and folds the affected statements (ccp_finalize).  */
1222 static void
1223 execute_ssa_ccp (void)
1225 ccp_initialize ();
1226 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
1227 ccp_finalize ();
1231 static bool
1232 gate_ccp (void)
1234 return flag_tree_ccp != 0;
1238 struct tree_opt_pass pass_ccp =
1240 "ccp", /* name */
1241 gate_ccp, /* gate */
1242 execute_ssa_ccp, /* execute */
1243 NULL, /* sub */
1244 NULL, /* next */
1245 0, /* static_pass_number */
1246 TV_TREE_CCP, /* tv_id */
1247 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
1248 0, /* properties_provided */
1249 0, /* properties_destroyed */
1250 0, /* todo_flags_start */
1251 TODO_cleanup_cfg | TODO_dump_func | TODO_rename_vars
1252 | TODO_ggc_collect | TODO_verify_ssa
1253 | TODO_verify_stmts, /* todo_flags_finish */
1254 0 /* letter */
1258 /* Given a constant value VAL for bitfield FIELD, and a destination
1259 variable VAR, return VAL appropriately widened to fit into VAR. If
1260 FIELD is wider than HOST_WIDE_INT, NULL is returned. */
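/* Illustrative values: widening the 4-bit signed bitfield value
   0b1101 (i.e. -3) into a 32-bit variable builds
   val | 0xfffffff0 == 0xfffffffd, whereas an unsigned field of the
   same width would instead be masked with val & 0xf.  */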
1262 tree
1263 widen_bitfield (tree val, tree field, tree var)
1265 unsigned HOST_WIDE_INT var_size, field_size;
1266 tree wide_val;
1267 unsigned HOST_WIDE_INT mask;
1268 unsigned int i;
1270 /* We can only do this if the size of the type and field and VAL are
1271 all constants representable in HOST_WIDE_INT. */
1272 if (!host_integerp (TYPE_SIZE (TREE_TYPE (var)), 1)
1273 || !host_integerp (DECL_SIZE (field), 1)
1274 || !host_integerp (val, 0))
1275 return NULL_TREE;
1277 var_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1);
1278 field_size = tree_low_cst (DECL_SIZE (field), 1);
1280 /* Give up if either the bitfield or the variable are too wide. */
1281 if (field_size > HOST_BITS_PER_WIDE_INT || var_size > HOST_BITS_PER_WIDE_INT)
1282 return NULL_TREE;
1284 gcc_assert (var_size >= field_size);
1286 /* If the sign bit of the value is not set or the field's type is unsigned,
1287 just mask off the high order bits of the value. */
1288 if (DECL_UNSIGNED (field)
1289 || !(tree_low_cst (val, 0) & (((HOST_WIDE_INT)1) << (field_size - 1))))
1291 /* Zero extension. Build a mask with the lower 'field_size' bits
1292 set and a BIT_AND_EXPR node to clear the high order bits of
1293 the value. */
1294 for (i = 0, mask = 0; i < field_size; i++)
1295 mask |= ((HOST_WIDE_INT) 1) << i;
1297 wide_val = build2 (BIT_AND_EXPR, TREE_TYPE (var), val,
1298 build_int_cst (TREE_TYPE (var), mask));
1300 else
 1302       /* Sign extension.  Create a mask with the upper 'var_size - field_size'
1303 bits set and a BIT_IOR_EXPR to set the high order bits of the
1304 value. */
1305 for (i = 0, mask = 0; i < (var_size - field_size); i++)
1306 mask |= ((HOST_WIDE_INT) 1) << (var_size - i - 1);
1308 wide_val = build2 (BIT_IOR_EXPR, TREE_TYPE (var), val,
1309 build_int_cst (TREE_TYPE (var), mask));
1312 return fold (wide_val);
1316 /* A subroutine of fold_stmt_r. Attempts to fold *(A+O) to A[X].
1317 BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
1318 is the desired result type. */
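/* For example (hypothetical declarations), with int a[10] and 4-byte
   ints, *(&a + 8) becomes a[2]: the byte offset 8 divides exactly by
   the element size 4, giving index 2.  */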
1320 static tree
1321 maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type)
1323 tree min_idx, idx, elt_offset = integer_zero_node;
1324 tree array_type, elt_type, elt_size;
1326 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
 1327    measured in units of the size of the element type) from that ARRAY_REF.
1328 We can't do anything if either is variable.
1330 The case we handle here is *(&A[N]+O). */
1331 if (TREE_CODE (base) == ARRAY_REF)
1333 tree low_bound = array_ref_low_bound (base);
1335 elt_offset = TREE_OPERAND (base, 1);
1336 if (TREE_CODE (low_bound) != INTEGER_CST
1337 || TREE_CODE (elt_offset) != INTEGER_CST)
1338 return NULL_TREE;
1340 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
1341 base = TREE_OPERAND (base, 0);
1344 /* Ignore stupid user tricks of indexing non-array variables. */
1345 array_type = TREE_TYPE (base);
1346 if (TREE_CODE (array_type) != ARRAY_TYPE)
1347 return NULL_TREE;
1348 elt_type = TREE_TYPE (array_type);
1349 if (!lang_hooks.types_compatible_p (orig_type, elt_type))
1350 return NULL_TREE;
1352 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
1353 element type (so we can use the alignment if it's not constant).
1354 Otherwise, compute the offset as an index by using a division. If the
1355 division isn't exact, then don't do anything. */
1356 elt_size = TYPE_SIZE_UNIT (elt_type);
1357 if (integer_zerop (offset))
1359 if (TREE_CODE (elt_size) != INTEGER_CST)
1360 elt_size = size_int (TYPE_ALIGN (elt_type));
1362 idx = integer_zero_node;
1364 else
1366 unsigned HOST_WIDE_INT lquo, lrem;
1367 HOST_WIDE_INT hquo, hrem;
1369 if (TREE_CODE (elt_size) != INTEGER_CST
1370 || div_and_round_double (TRUNC_DIV_EXPR, 1,
1371 TREE_INT_CST_LOW (offset),
1372 TREE_INT_CST_HIGH (offset),
1373 TREE_INT_CST_LOW (elt_size),
1374 TREE_INT_CST_HIGH (elt_size),
1375 &lquo, &hquo, &lrem, &hrem)
1376 || lrem || hrem)
1377 return NULL_TREE;
1379 idx = build_int_cst_wide (NULL_TREE, lquo, hquo);
1382 /* Assume the low bound is zero. If there is a domain type, get the
1383 low bound, if any, convert the index into that type, and add the
1384 low bound. */
1385 min_idx = integer_zero_node;
1386 if (TYPE_DOMAIN (array_type))
1388 if (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
1389 min_idx = TYPE_MIN_VALUE (TYPE_DOMAIN (array_type));
1390 else
1391 min_idx = fold_convert (TYPE_DOMAIN (array_type), min_idx);
1393 if (TREE_CODE (min_idx) != INTEGER_CST)
1394 return NULL_TREE;
1396 idx = fold_convert (TYPE_DOMAIN (array_type), idx);
1397 elt_offset = fold_convert (TYPE_DOMAIN (array_type), elt_offset);
1400 if (!integer_zerop (min_idx))
1401 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
1402 if (!integer_zerop (elt_offset))
1403 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
1405 return build (ARRAY_REF, orig_type, base, idx, min_idx,
1406 size_int (tree_low_cst (elt_size, 1)
1407 / (TYPE_ALIGN_UNIT (elt_type))));
1411 /* A subroutine of fold_stmt_r. Attempts to fold *(S+O) to S.X.
1412 BASE is a record type. OFFSET is a byte displacement. ORIG_TYPE
1413 is the desired result type. */
1414 /* ??? This doesn't handle class inheritance. */
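/* For example (hypothetical declarations), given
   struct s { int x; int y; } v with 4-byte ints, *(&v + 4) becomes
   v.y, because byte offset 4 matches the position of field y.  */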
1416 static tree
1417 maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
1418 tree orig_type, bool base_is_ptr)
1420 tree f, t, field_type, tail_array_field, field_offset;
1422 if (TREE_CODE (record_type) != RECORD_TYPE
1423 && TREE_CODE (record_type) != UNION_TYPE
1424 && TREE_CODE (record_type) != QUAL_UNION_TYPE)
1425 return NULL_TREE;
1427 /* Short-circuit silly cases. */
1428 if (lang_hooks.types_compatible_p (record_type, orig_type))
1429 return NULL_TREE;
1431 tail_array_field = NULL_TREE;
1432 for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
1434 int cmp;
1436 if (TREE_CODE (f) != FIELD_DECL)
1437 continue;
1438 if (DECL_BIT_FIELD (f))
1439 continue;
1441 field_offset = byte_position (f);
1442 if (TREE_CODE (field_offset) != INTEGER_CST)
1443 continue;
1445 /* ??? Java creates "interesting" fields for representing base classes.
1446 They have no name, and have no context. With no context, we get into
1447 trouble with nonoverlapping_component_refs_p. Skip them. */
1448 if (!DECL_FIELD_CONTEXT (f))
1449 continue;
1451 /* The previous array field isn't at the end. */
1452 tail_array_field = NULL_TREE;
1454 /* Check to see if this offset overlaps with the field. */
1455 cmp = tree_int_cst_compare (field_offset, offset);
1456 if (cmp > 0)
1457 continue;
1459 field_type = TREE_TYPE (f);
1461 /* Here we exactly match the offset being checked. If the types match,
1462 then we can return that field. */
1463 if (cmp == 0
1464 && lang_hooks.types_compatible_p (orig_type, field_type))
1466 if (base_is_ptr)
1467 base = build1 (INDIRECT_REF, record_type, base);
1468 t = build (COMPONENT_REF, field_type, base, f, NULL_TREE);
1469 return t;
1472 /* Don't care about offsets into the middle of scalars. */
1473 if (!AGGREGATE_TYPE_P (field_type))
1474 continue;
1476 /* Check for array at the end of the struct. This is often
 1477          used for flexible array members.  We should be able to
1478 turn this into an array access anyway. */
1479 if (TREE_CODE (field_type) == ARRAY_TYPE)
1480 tail_array_field = f;
1482 /* Check the end of the field against the offset. */
1483 if (!DECL_SIZE_UNIT (f)
1484 || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
1485 continue;
1486 t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
1487 if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
1488 continue;
1490 /* If we matched, then set offset to the displacement into
1491 this field. */
1492 offset = t;
1493 goto found;
1496 if (!tail_array_field)
1497 return NULL_TREE;
1499 f = tail_array_field;
1500 field_type = TREE_TYPE (f);
1501 offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);
1503 found:
1504 /* If we get here, we've got an aggregate field, and a possibly
 1505      nonzero offset into it.  Recurse and hope for a valid match.  */
1506 if (base_is_ptr)
1507 base = build1 (INDIRECT_REF, record_type, base);
1508 base = build (COMPONENT_REF, field_type, base, f, NULL_TREE);
1510 t = maybe_fold_offset_to_array_ref (base, offset, orig_type);
1511 if (t)
1512 return t;
1513 return maybe_fold_offset_to_component_ref (field_type, base, offset,
1514 orig_type, false);
1518 /* A subroutine of fold_stmt_r. Attempt to simplify *(BASE+OFFSET).
1519 Return the simplified expression, or NULL if nothing could be done. */
1521 static tree
1522 maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
1524 tree t;
1526 /* We may well have constructed a double-nested PLUS_EXPR via multiple
1527 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
1528 are sometimes added. */
1529 base = fold (base);
1530 STRIP_NOPS (base);
1531 TREE_OPERAND (expr, 0) = base;
1533 /* One possibility is that the address reduces to a string constant. */
1534 t = fold_read_from_constant_string (expr);
1535 if (t)
1536 return t;
1538 /* Add in any offset from a PLUS_EXPR. */
1539 if (TREE_CODE (base) == PLUS_EXPR)
1541 tree offset2;
1543 offset2 = TREE_OPERAND (base, 1);
1544 if (TREE_CODE (offset2) != INTEGER_CST)
1545 return NULL_TREE;
1546 base = TREE_OPERAND (base, 0);
1548 offset = int_const_binop (PLUS_EXPR, offset, offset2, 1);
1551 if (TREE_CODE (base) == ADDR_EXPR)
1553 /* Strip the ADDR_EXPR. */
1554 base = TREE_OPERAND (base, 0);
1556 /* Fold away CONST_DECL to its value, if the type is scalar. */
1557 if (TREE_CODE (base) == CONST_DECL
1558 && is_gimple_min_invariant (DECL_INITIAL (base)))
1559 return DECL_INITIAL (base);
1561 /* Try folding *(&B+O) to B[X]. */
1562 t = maybe_fold_offset_to_array_ref (base, offset, TREE_TYPE (expr));
1563 if (t)
1564 return t;
1566 /* Try folding *(&B+O) to B.X. */
1567 t = maybe_fold_offset_to_component_ref (TREE_TYPE (base), base, offset,
1568 TREE_TYPE (expr), false);
1569 if (t)
1570 return t;
1572 /* Fold *&B to B. We can only do this if EXPR is the same type
1573 as BASE. We can't do this if EXPR is the element type of an array
1574 and BASE is the array. */
1575 if (integer_zerop (offset)
1576 && lang_hooks.types_compatible_p (TREE_TYPE (base),
1577 TREE_TYPE (expr)))
1578 return base;
1580 else
1582 /* We can get here for out-of-range string constant accesses,
1583 such as "_"[3]. Bail out of the entire substitution search
1584 and arrange for the entire statement to be replaced by a
 1585          call to __builtin_trap.  In all likelihood this will all be
1586 constant-folded away, but in the meantime we can't leave with
1587 something that get_expr_operands can't understand. */
1589 t = base;
1590 STRIP_NOPS (t);
1591 if (TREE_CODE (t) == ADDR_EXPR
1592 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
1594 /* FIXME: Except that this causes problems elsewhere with dead
1595 code not being deleted, and we abort in the rtl expanders
1596 because we failed to remove some ssa_name. In the meantime,
1597 just return zero. */
1598 /* FIXME2: This condition should be signaled by
1599 fold_read_from_constant_string directly, rather than
1600 re-checking for it here. */
1601 return integer_zero_node;
1604 /* Try folding *(B+O) to B->X. Still an improvement. */
1605 if (POINTER_TYPE_P (TREE_TYPE (base)))
1607 t = maybe_fold_offset_to_component_ref (TREE_TYPE (TREE_TYPE (base)),
1608 base, offset,
1609 TREE_TYPE (expr), true);
1610 if (t)
1611 return t;
1615 /* Otherwise we had an offset that we could not simplify. */
1616 return NULL_TREE;
1620 /* A subroutine of fold_stmt_r. EXPR is a PLUS_EXPR.
1622 A quaint feature extant in our address arithmetic is that there
1623 can be hidden type changes here. The type of the result need
1624 not be the same as the type of the input pointer.
1626 What we're after here is an expression of the form
1627 (T *)(&array + const)
1628 where the cast doesn't actually exist, but is implicit in the
1629 type of the PLUS_EXPR. We'd like to turn this into
1630 &array[x]
1631 which may be able to propagate further. */
1633 static tree
1634 maybe_fold_stmt_addition (tree expr)
1636 tree op0 = TREE_OPERAND (expr, 0);
1637 tree op1 = TREE_OPERAND (expr, 1);
1638 tree ptr_type = TREE_TYPE (expr);
1639 tree ptd_type;
1640 tree t;
1641 bool subtract = (TREE_CODE (expr) == MINUS_EXPR);
1643 /* We're only interested in pointer arithmetic. */
1644 if (!POINTER_TYPE_P (ptr_type))
1645 return NULL_TREE;
1646 /* Canonicalize the integral operand to op1. */
1647 if (INTEGRAL_TYPE_P (TREE_TYPE (op0)))
1649 if (subtract)
1650 return NULL_TREE;
1651 t = op0, op0 = op1, op1 = t;
1653 /* It had better be a constant. */
1654 if (TREE_CODE (op1) != INTEGER_CST)
1655 return NULL_TREE;
1656 /* The first operand should be an ADDR_EXPR. */
1657 if (TREE_CODE (op0) != ADDR_EXPR)
1658 return NULL_TREE;
1659 op0 = TREE_OPERAND (op0, 0);
1661 /* If the first operand is an ARRAY_REF, expand it so that we can fold
1662 the offset into it. */
1663 while (TREE_CODE (op0) == ARRAY_REF)
1665 tree array_obj = TREE_OPERAND (op0, 0);
1666 tree array_idx = TREE_OPERAND (op0, 1);
1667 tree elt_type = TREE_TYPE (op0);
1668 tree elt_size = TYPE_SIZE_UNIT (elt_type);
1669 tree min_idx;
1671 if (TREE_CODE (array_idx) != INTEGER_CST)
1672 break;
1673 if (TREE_CODE (elt_size) != INTEGER_CST)
1674 break;
1676 /* Un-bias the index by the min index of the array type. */
1677 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
1678 if (min_idx)
1680 min_idx = TYPE_MIN_VALUE (min_idx);
1681 if (min_idx)
1683 if (TREE_CODE (min_idx) != INTEGER_CST)
1684 break;
1686 array_idx = convert (TREE_TYPE (min_idx), array_idx);
1687 if (!integer_zerop (min_idx))
1688 array_idx = int_const_binop (MINUS_EXPR, array_idx,
1689 min_idx, 0);
1693 /* Convert the index to a byte offset. */
1694 array_idx = convert (sizetype, array_idx);
1695 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
1697 /* Update the operands for the next round, or for folding. */
1698 /* If we're manipulating unsigned types, then folding into negative
1699 values can produce incorrect results. Particularly if the type
1700 is smaller than the width of the pointer. */
1701 if (subtract
1702 && TYPE_UNSIGNED (TREE_TYPE (op1))
1703 && tree_int_cst_lt (array_idx, op1))
1704 return NULL;
1705 op1 = int_const_binop (subtract ? MINUS_EXPR : PLUS_EXPR,
1706 array_idx, op1, 0);
1707 subtract = false;
1708 op0 = array_obj;
1711 /* If we weren't able to fold the subtraction into another array reference,
1712 canonicalize the integer for passing to the array and component ref
1713 simplification functions. */
1714 if (subtract)
1716 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
1717 return NULL;
1718 op1 = fold (build1 (NEGATE_EXPR, TREE_TYPE (op1), op1));
1719 /* ??? In theory fold should always produce another integer. */
1720 if (TREE_CODE (op1) != INTEGER_CST)
1721 return NULL;
1724 ptd_type = TREE_TYPE (ptr_type);
1726 /* At which point we can try some of the same things as for indirects. */
1727 t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type);
1728 if (!t)
1729 t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
1730 ptd_type, false);
1731 if (t)
1732 t = build1 (ADDR_EXPR, ptr_type, t);
1734 return t;
1738 /* Subroutine of fold_stmt called via walk_tree. We perform several
1739 simplifications of EXPR_P, mostly having to do with pointer arithmetic. */
1741 static tree
1742 fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
1744 bool *changed_p = data;
1745 tree expr = *expr_p, t;
1747 /* ??? It'd be nice if walk_tree had a pre-order option. */
1748 switch (TREE_CODE (expr))
1750 case INDIRECT_REF:
1751 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
1752 if (t)
1753 return t;
1754 *walk_subtrees = 0;
1756 t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
1757 integer_zero_node);
1758 break;
1760 /* ??? Could handle ARRAY_REF here, as a variant of INDIRECT_REF.
1761 We'd only want to bother decomposing an existing ARRAY_REF if
1762 the base array is found to have another offset contained within.
1763 Otherwise we'd be wasting time. */
1765 case ADDR_EXPR:
1766 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
1767 if (t)
1768 return t;
1769 *walk_subtrees = 0;
 1771       /* Set TREE_INVARIANT so that the value is properly
1772 considered constant, and so gets propagated as expected. */
1773 if (*changed_p)
1774 recompute_tree_invarant_for_addr_expr (expr);
1775 return NULL_TREE;
1777 case PLUS_EXPR:
1778 case MINUS_EXPR:
1779 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
1780 if (t)
1781 return t;
1782 t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
1783 if (t)
1784 return t;
1785 *walk_subtrees = 0;
1787 t = maybe_fold_stmt_addition (expr);
1788 break;
1790 case COMPONENT_REF:
1791 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
1792 if (t)
1793 return t;
1794 *walk_subtrees = 0;
1796 /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
1797 We've already checked that the records are compatible, so we should
1798 come up with a set of compatible fields. */
1800 tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
1801 tree expr_field = TREE_OPERAND (expr, 1);
1803 if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
1805 expr_field = find_compatible_field (expr_record, expr_field);
1806 TREE_OPERAND (expr, 1) = expr_field;
1809 break;
1811 default:
1812 return NULL_TREE;
1815 if (t)
1817 *expr_p = t;
1818 *changed_p = true;
1821 return NULL_TREE;
1825 /* Return the string length of ARG in LENGTH. If ARG is an SSA name variable,
1826 follow its use-def chains. If LENGTH is not NULL and its value is not
1827 equal to the length we determine, or if we are unable to determine the
1828 length, return false. VISITED is a bitmap of visited variables. */
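/* For example (hypothetical GIMPLE), for p_3 = PHI <"foo", "bar">
   every argument has constant length 3, so *LENGTH is set to 3; had
   the argument lengths differed, we would return false.  */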
1830 static bool
1831 get_strlen (tree arg, tree *length, bitmap visited)
1833 tree var, def_stmt, val;
1835 if (TREE_CODE (arg) != SSA_NAME)
1837 val = c_strlen (arg, 1);
1838 if (!val)
1839 return false;
1841 if (*length && simple_cst_equal (val, *length) != 1)
1842 return false;
1844 *length = val;
1845 return true;
1848 /* If we were already here, break the infinite cycle. */
1849 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
1850 return true;
1851 bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
1853 var = arg;
1854 def_stmt = SSA_NAME_DEF_STMT (var);
1856 switch (TREE_CODE (def_stmt))
1858 case MODIFY_EXPR:
1860 tree len, rhs;
1862 /* The RHS of the statement defining VAR must either have a
1863 constant length or come from another SSA_NAME with a constant
1864 length. */
1865 rhs = TREE_OPERAND (def_stmt, 1);
1866 STRIP_NOPS (rhs);
1867 if (TREE_CODE (rhs) == SSA_NAME)
1868 return get_strlen (rhs, length, visited);
1870 /* See if the RHS is a constant length. */
1871 len = c_strlen (rhs, 1);
1872 if (len)
1874 if (*length && simple_cst_equal (len, *length) != 1)
1875 return false;
1877 *length = len;
1878 return true;
1881 break;
1884 case PHI_NODE:
1886 /* All the arguments of the PHI node must have the same constant
1887 length. */
1888 int i;
1890 for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
1892 tree arg = PHI_ARG_DEF (def_stmt, i);
1894 /* If this PHI has itself as an argument, we cannot
1895 determine the string length of this argument. However,
1896 if we can find a constant string length for the other
1897 PHI args then we can still be sure that this is a
1898 constant string length. So be optimistic and just
1899 continue with the next argument. */
1900 if (arg == PHI_RESULT (def_stmt))
1901 continue;
1903 if (!get_strlen (arg, length, visited))
1904 return false;
1907 return true;
1910 default:
1911 break;
1915 return false;
1919 /* Fold builtin call FN in statement STMT. If it cannot be folded into a
1920 constant, return NULL_TREE. Otherwise, return its constant value. */
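/* For instance (hypothetical), a call strlen (p_1) folds to the
   constant 5 when the use-def chain of p_1 proves it points to a
   string of length 5.  */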
1922 static tree
1923 ccp_fold_builtin (tree stmt, tree fn)
1925 tree result, strlen_val[2];
1926 tree callee, arglist, a;
1927 int strlen_arg, i;
1928 bitmap visited;
1929 bool ignore;
1931 ignore = TREE_CODE (stmt) != MODIFY_EXPR;
1933 /* First try the generic builtin folder. If that succeeds, return the
1934 result directly. */
1935 callee = get_callee_fndecl (fn);
1936 arglist = TREE_OPERAND (fn, 1);
1937 result = fold_builtin (callee, arglist, ignore);
1938 if (result)
1940 if (ignore)
1941 STRIP_NOPS (result);
1942 return result;
1945 /* Ignore MD builtins. */
1946 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
1947 return NULL_TREE;
1949 /* If the builtin could not be folded, and it has no argument list,
1950 we're done. */
1951 if (!arglist)
1952 return NULL_TREE;
1954 /* Limit the work only for builtins we know how to simplify. */
1955 switch (DECL_FUNCTION_CODE (callee))
1957 case BUILT_IN_STRLEN:
1958 case BUILT_IN_FPUTS:
1959 case BUILT_IN_FPUTS_UNLOCKED:
1960 strlen_arg = 1;
1961 break;
1962 case BUILT_IN_STRCPY:
1963 case BUILT_IN_STRNCPY:
1964 strlen_arg = 2;
1965 break;
1966 default:
1967 return NULL_TREE;
1970 /* Try to use the dataflow information gathered by the CCP process. */
1971 visited = BITMAP_ALLOC (NULL);
1973 memset (strlen_val, 0, sizeof (strlen_val));
1974 for (i = 0, a = arglist;
1975 strlen_arg;
1976 i++, strlen_arg >>= 1, a = TREE_CHAIN (a))
1977 if (strlen_arg & 1)
1979 bitmap_clear (visited);
1980 if (!get_strlen (TREE_VALUE (a), &strlen_val[i], visited))
1981 strlen_val[i] = NULL_TREE;
1984 BITMAP_FREE (visited);
1986 result = NULL_TREE;
1987 switch (DECL_FUNCTION_CODE (callee))
1989 case BUILT_IN_STRLEN:
1990 if (strlen_val[0])
1992 tree new = fold_convert (TREE_TYPE (fn), strlen_val[0]);
1994 /* If the result is not a valid gimple value, or not a cast
 1995          of a valid gimple value, then we cannot use the result.  */
1996 if (is_gimple_val (new)
1997 || (is_gimple_cast (new)
1998 && is_gimple_val (TREE_OPERAND (new, 0))))
1999 return new;
2001 break;
2003 case BUILT_IN_STRCPY:
2004 if (strlen_val[1] && is_gimple_val (strlen_val[1]))
2006 tree fndecl = get_callee_fndecl (fn);
2007 tree arglist = TREE_OPERAND (fn, 1);
2008 result = fold_builtin_strcpy (fndecl, arglist, strlen_val[1]);
2010 break;
2012 case BUILT_IN_STRNCPY:
2013 if (strlen_val[1] && is_gimple_val (strlen_val[1]))
2015 tree fndecl = get_callee_fndecl (fn);
2016 tree arglist = TREE_OPERAND (fn, 1);
2017 result = fold_builtin_strncpy (fndecl, arglist, strlen_val[1]);
2019 break;
2021 case BUILT_IN_FPUTS:
2022 result = fold_builtin_fputs (arglist,
2023 TREE_CODE (stmt) != MODIFY_EXPR, 0,
2024 strlen_val[0]);
2025 break;
2027 case BUILT_IN_FPUTS_UNLOCKED:
2028 result = fold_builtin_fputs (arglist,
2029 TREE_CODE (stmt) != MODIFY_EXPR, 1,
2030 strlen_val[0]);
2031 break;
2033 default:
2034 gcc_unreachable ();
2037 if (result && ignore)
2038 result = fold_ignored_result (result);
2039 return result;
 2043 /* Fold the statement pointed to by STMT_P.  In some cases, this function may
2044 replace the whole statement with a new one. Returns true iff folding
2045 makes any changes. */
2047 bool
2048 fold_stmt (tree *stmt_p)
2050 tree rhs, result, stmt;
2051 bool changed = false;
2053 stmt = *stmt_p;
2055 /* If we replaced constants and the statement makes pointer dereferences,
2056 then we may need to fold instances of *&VAR into VAR, etc. */
2057 if (walk_tree (stmt_p, fold_stmt_r, &changed, NULL))
2059 *stmt_p
2060 = build_function_call_expr (implicit_built_in_decls[BUILT_IN_TRAP],
2061 NULL);
2062 return true;
2065 rhs = get_rhs (stmt);
2066 if (!rhs)
2067 return changed;
2068 result = NULL_TREE;
2070 if (TREE_CODE (rhs) == CALL_EXPR)
2072 tree callee;
2074 /* Check for builtins that CCP can handle using information not
2075 available in the generic fold routines. */
2076 callee = get_callee_fndecl (rhs);
2077 if (callee && DECL_BUILT_IN (callee))
2078 result = ccp_fold_builtin (stmt, rhs);
2079 else
2081 /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve
2082 here are when we've propagated the address of a decl into the
2083 object slot. */
2084 /* ??? Should perhaps do this in fold proper. However, doing it
2085 there requires that we create a new CALL_EXPR, and that requires
2086 copying EH region info to the new node. Easier to just do it
2087 here where we can just smash the call operand. */
2088 callee = TREE_OPERAND (rhs, 0);
2089 if (TREE_CODE (callee) == OBJ_TYPE_REF
2090 && lang_hooks.fold_obj_type_ref
2091 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
2092 && DECL_P (TREE_OPERAND
2093 (OBJ_TYPE_REF_OBJECT (callee), 0)))
2095 tree t;
2097 /* ??? Caution: Broken ADDR_EXPR semantics means that
2098 looking at the type of the operand of the addr_expr
2099 can yield an array type. See silly exception in
2100 check_pointer_types_r. */
2102 t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
2103 t = lang_hooks.fold_obj_type_ref (callee, t);
2104 if (t)
2106 TREE_OPERAND (rhs, 0) = t;
2107 changed = true;
2113 /* If we couldn't fold the RHS, hand over to the generic fold routines. */
2114 if (result == NULL_TREE)
2115 result = fold (rhs);
2117 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR that
2118 may have been added by fold, and "useless" type conversions that might
2119 now be apparent due to propagation. */
2120 STRIP_USELESS_TYPE_CONVERSION (result);
2122 if (result != rhs)
2123 changed |= set_rhs (stmt_p, result);
2125 return changed;
2129 /* Convert EXPR into a GIMPLE value suitable for substitution on the
2130 RHS of an assignment. Insert the necessary statements before
2131 iterator *SI_P. */
2133 static tree
2134 convert_to_gimple_builtin (block_stmt_iterator *si_p, tree expr)
2136 tree_stmt_iterator ti;
2137 tree stmt = bsi_stmt (*si_p);
2138 tree tmp, stmts = NULL;
2140 push_gimplify_context ();
2141 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
2142 pop_gimplify_context (NULL);
2144 /* The replacement can expose previously unreferenced variables. */
2145 for (ti = tsi_start (stmts); !tsi_end_p (ti); tsi_next (&ti))
2147 find_new_referenced_vars (tsi_stmt_ptr (ti));
2148 mark_new_vars_to_rename (tsi_stmt (ti), vars_to_rename);
2151 if (EXPR_HAS_LOCATION (stmt))
2152 annotate_all_with_locus (&stmts, EXPR_LOCATION (stmt));
2154 bsi_insert_before (si_p, stmts, BSI_SAME_STMT);
2156 return tmp;
2160 /* A simple pass that attempts to fold all builtin functions. This pass
2161 is run after we've propagated as many constants as we can. */
2163 static void
2164 execute_fold_all_builtins (void)
2166 bool cfg_changed = false;
2167 basic_block bb;
2168 FOR_EACH_BB (bb)
2170 block_stmt_iterator i;
2171 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
2173 tree *stmtp = bsi_stmt_ptr (i);
2174 tree call = get_rhs (*stmtp);
2175 tree callee, result;
2177 if (!call || TREE_CODE (call) != CALL_EXPR)
2178 continue;
2179 callee = get_callee_fndecl (call);
2180 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
2181 continue;
2183 result = ccp_fold_builtin (*stmtp, call);
2184 if (!result)
2185 switch (DECL_FUNCTION_CODE (callee))
2187 case BUILT_IN_CONSTANT_P:
2188 /* Resolve __builtin_constant_p. If it hasn't been
2189 folded to integer_one_node by now, it's fairly
2190 certain that the value simply isn't constant. */
2191 result = integer_zero_node;
2192 break;
2194 default:
2195 continue;
2198 if (dump_file && (dump_flags & TDF_DETAILS))
2200 fprintf (dump_file, "Simplified\n ");
2201 print_generic_stmt (dump_file, *stmtp, dump_flags);
2204 if (!set_rhs (stmtp, result))
2206 result = convert_to_gimple_builtin (&i, result);
2207 if (result && !set_rhs (stmtp, result))
2208 abort ();
2210 modify_stmt (*stmtp);
2211 if (maybe_clean_eh_stmt (*stmtp)
2212 && tree_purge_dead_eh_edges (bb))
2213 cfg_changed = true;
2215 if (dump_file && (dump_flags & TDF_DETAILS))
2217 fprintf (dump_file, "to\n ");
2218 print_generic_stmt (dump_file, *stmtp, dump_flags);
2219 fprintf (dump_file, "\n");
2224 /* Delete unreachable blocks. */
2225 if (cfg_changed)
2226 cleanup_tree_cfg ();
2230 struct tree_opt_pass pass_fold_builtins =
2232 "fab", /* name */
2233 NULL, /* gate */
2234 execute_fold_all_builtins, /* execute */
2235 NULL, /* sub */
2236 NULL, /* next */
2237 0, /* static_pass_number */
2238 0, /* tv_id */
2239 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
2240 0, /* properties_provided */
2241 0, /* properties_destroyed */
2242 0, /* todo_flags_start */
2243 TODO_dump_func
2244 | TODO_verify_ssa
2245 | TODO_rename_vars, /* todo_flags_finish */
2246 0 /* letter */