/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* Conditional constant propagation.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "output.h"
#include "errors.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED = 0,
  UNDEFINED,
  UNKNOWN_VAL,
  CONSTANT,
  VARYING
} latticevalue;

/* Main structure for CCP.  Contains the lattice value and, if it's a
   constant, the constant value.  */
typedef struct
{
  latticevalue lattice_val;
  tree const_val;
} value;

/* This is used to track the current value of each variable.  */
static value *value_vector;
/* Dump lattice value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, value val)
{
  switch (val.lattice_val)
    {
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case UNKNOWN_VAL:
      fprintf (outf, "%sUNKNOWN_VAL", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.const_val, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}
/* The regular is_gimple_min_invariant does a shallow test of the object.
   It assumes that full gimplification has happened, or will happen on the
   object.  For a value coming from DECL_INITIAL, this is not true, so we
   have to be more strict ourselves.  */

static bool
ccp_decl_initial_min_invariant (tree t)
{
  if (!is_gimple_min_invariant (t))
    return false;
  if (TREE_CODE (t) == ADDR_EXPR)
    {
      /* Inline and unroll is_gimple_addressable.  */
      while (1)
        {
          t = TREE_OPERAND (t, 0);
          if (is_gimple_id (t))
            return true;
          if (!handled_component_p (t))
            return false;
        }
    }
  return true;
}
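
/* For illustration only (not part of the original pass): for an
   initializer such as '&s.a[2]', the loop above peels the COMPONENT_REF
   and ARRAY_REF and reaches the declaration 's', so the address is
   accepted.  For something like '&p->f' it reaches the INDIRECT_REF,
   which is neither a gimple id nor a handled component, so the address
   is rejected even if the shallow is_gimple_min_invariant test had
   passed.  */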
/* Return a default value for variable VAR using the following rules:

   1- Function arguments are considered VARYING.

   2- Global and static variables that are declared constant are
      considered CONSTANT.

   3- Any other virtually defined variable is considered UNKNOWN_VAL.

   4- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.  */

static value
get_default_value (tree var)
{
  value val;
  tree sym;

  if (TREE_CODE (var) == SSA_NAME)
    sym = SSA_NAME_VAR (var);
  else
    {
      gcc_assert (DECL_P (var));
      sym = var;
    }

  val.lattice_val = UNDEFINED;
  val.const_val = NULL_TREE;

  if (TREE_CODE (var) == SSA_NAME
      && SSA_NAME_VALUE (var)
      && is_gimple_min_invariant (SSA_NAME_VALUE (var)))
    {
      val.lattice_val = CONSTANT;
      val.const_val = SSA_NAME_VALUE (var);
    }
  else if (TREE_CODE (sym) == PARM_DECL || TREE_THIS_VOLATILE (sym))
    {
      /* Function arguments and volatile variables are considered VARYING.  */
      val.lattice_val = VARYING;
    }
  else if (TREE_STATIC (sym))
    {
      /* Globals and static variables are considered UNKNOWN_VAL,
         unless they are declared 'const'.  */
      if (TREE_READONLY (sym)
          && DECL_INITIAL (sym)
          && ccp_decl_initial_min_invariant (DECL_INITIAL (sym)))
        {
          val.lattice_val = CONSTANT;
          val.const_val = DECL_INITIAL (sym);
        }
      else
        {
          val.const_val = NULL_TREE;
          val.lattice_val = UNKNOWN_VAL;
        }
    }
  else if (!is_gimple_reg (sym))
    {
      val.const_val = NULL_TREE;
      val.lattice_val = UNKNOWN_VAL;
    }
  else
    {
      enum tree_code code;
      tree stmt = SSA_NAME_DEF_STMT (var);

      if (!IS_EMPTY_STMT (stmt))
        {
          code = TREE_CODE (stmt);
          if (code != MODIFY_EXPR && code != PHI_NODE)
            val.lattice_val = VARYING;
        }
    }

  return val;
}
/* Get the constant value associated with variable VAR.  */

static value *
get_value (tree var)
{
  value *val;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  val = &value_vector[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}
/* Set the lattice value for variable VAR to VAL.  Return true if VAL
   is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, value val)
{
  value *old = get_value (var);

  if (val.lattice_val == UNDEFINED)
    {
      /* CONSTANT->UNDEFINED is never a valid state transition.  */
      gcc_assert (old->lattice_val != CONSTANT);

      /* UNKNOWN_VAL->UNDEFINED is never a valid state transition.  */
      gcc_assert (old->lattice_val != UNKNOWN_VAL);

      /* VARYING->UNDEFINED is generally not a valid state transition,
         except for values which are initialized to VARYING.  */
      gcc_assert (old->lattice_val != VARYING
                  || get_default_value (var).lattice_val == VARYING);
    }
  else if (val.lattice_val == CONSTANT)
    /* VARYING -> CONSTANT is an invalid state transition, except
       for objects which start off in a VARYING state.  */
    gcc_assert (old->lattice_val != VARYING
                || get_default_value (var).lattice_val == VARYING);

  /* If the constant for VAR has changed, then this VAR is really varying.  */
  if (old->lattice_val == CONSTANT
      && val.lattice_val == CONSTANT
      && !simple_cst_equal (old->const_val, val.const_val))
    {
      val.lattice_val = VARYING;
      val.const_val = NULL_TREE;
    }

  if (old->lattice_val != val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          dump_lattice_value (dump_file, "Lattice value changed to ", val);
          fprintf (dump_file, ".  Adding definition to SSA edges.\n");
        }

      *old = val;
      return true;
    }

  return false;
}
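
/* For illustration only (not part of the original pass): the assertions
   above encode the requirement that a value only moves down the lattice,
   e.g.

       UNDEFINED -> CONSTANT 5 -> VARYING

   is fine, while CONSTANT -> UNDEFINED is rejected, and a CONSTANT that
   changes to a different constant is collapsed to VARYING.  This
   monotonicity is what guarantees that the propagation engine
   terminates.  */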
/* Set the lattice value for the variable VAR to VARYING.  */

static void
def_to_varying (tree var)
{
  value val;
  val.lattice_val = VARYING;
  val.const_val = NULL_TREE;
  set_lattice_value (var, val);
}
/* Return the likely latticevalue for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if any operands of STMT are undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static latticevalue
likely_value (tree stmt)
{
  vuse_optype vuses;
  int found_constant = 0;
  stmt_ann_t ann;
  tree use;
  ssa_op_iter iter;

  /* If the statement makes aliased loads or has volatile operands, it
     won't fold to a constant value.  */
  ann = stmt_ann (stmt);
  if (ann->makes_aliased_loads || ann->has_volatile_ops)
    return VARYING;

  /* A CALL_EXPR is assumed to be varying.  This may be overly conservative,
     in the presence of const and pure calls.  */
  if (get_call_expr_in (stmt) != NULL_TREE)
    return VARYING;

  get_stmt_operands (stmt);

  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      value *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
        return UNDEFINED;

      if (val->lattice_val == CONSTANT)
        found_constant = 1;
    }

  vuses = VUSE_OPS (ann);

  if (NUM_VUSES (vuses))
    {
      tree vuse = VUSE_OP (vuses, 0);
      value *val = get_value (vuse);

      if (val->lattice_val == UNKNOWN_VAL)
        return UNKNOWN_VAL;

      /* There should be no VUSE operands that are UNDEFINED.  */
      gcc_assert (val->lattice_val != UNDEFINED);

      if (val->lattice_val == CONSTANT)
        found_constant = 1;
    }

  return ((found_constant || (!USE_OPS (ann) && !vuses)) ? CONSTANT : VARYING);
}
/* Function indicating whether we ought to include information for VAR
   when calculating immediate uses.  */

static bool
need_imm_uses_for (tree var)
{
  return get_value (var)->lattice_val != VARYING;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;
  sbitmap is_may_def;

  value_vector = (value *) xmalloc (num_ssa_names * sizeof (value));
  memset (value_vector, 0, num_ssa_names * sizeof (value));

  /* Set of SSA_NAMEs that are defined by a V_MAY_DEF.  */
  is_may_def = sbitmap_alloc (num_ssa_names);
  sbitmap_zero (is_may_def);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          bool is_varying = false;
          tree stmt = bsi_stmt (i);
          ssa_op_iter iter;
          tree def;

          get_stmt_operands (stmt);

          /* Get the default value for each DEF and V_MUST_DEF.  */
          FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter,
                                     (SSA_OP_DEF | SSA_OP_VMUSTDEF))
            {
              if (get_value (def)->lattice_val == VARYING)
                is_varying = true;
            }

          /* Mark all V_MAY_DEF operands VARYING.  */
          FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_VMAYDEF)
            {
              get_value (def)->lattice_val = VARYING;
              SET_BIT (is_may_def, SSA_NAME_VERSION (def));
            }

          /* Statements other than MODIFY_EXPR, COND_EXPR and
             SWITCH_EXPR are not interesting for constant propagation.
             Mark them VARYING.  */
          if (TREE_CODE (stmt) != MODIFY_EXPR
              && TREE_CODE (stmt) != COND_EXPR
              && TREE_CODE (stmt) != SWITCH_EXPR)
            is_varying = true;

          DONT_SIMULATE_AGAIN (stmt) = is_varying;
        }
    }

  /* Now process PHI nodes.  */
  FOR_EACH_BB (bb)
    {
      tree phi, var;
      int x;

      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
        {
          value *val = get_value (PHI_RESULT (phi));

          for (x = 0; x < PHI_NUM_ARGS (phi); x++)
            {
              var = PHI_ARG_DEF (phi, x);

              /* If one argument has a V_MAY_DEF, the result is
                 VARYING.  */
              if (TREE_CODE (var) == SSA_NAME)
                {
                  if (TEST_BIT (is_may_def, SSA_NAME_VERSION (var)))
                    {
                      val->lattice_val = VARYING;
                      SET_BIT (is_may_def,
                               SSA_NAME_VERSION (PHI_RESULT (phi)));
                      break;
                    }
                }
            }

          DONT_SIMULATE_AGAIN (phi) = (val->lattice_val == VARYING);
        }
    }

  sbitmap_free (is_may_def);

  /* Compute immediate uses for variables we care about.  */
  compute_immediate_uses (TDFA_USE_OPS | TDFA_USE_VOPS, need_imm_uses_for);
}
/* Replace USE references in statement STMT with their immediate reaching
   definition.  Return true if at least one reference was replaced.  If
   REPLACED_ADDRESSES_P is given, it will be set to true if an address
   constant was replaced.  */

static bool
replace_uses_in (tree stmt, bool *replaced_addresses_p)
{
  bool replaced = false;
  use_operand_p use;
  ssa_op_iter iter;

  if (replaced_addresses_p)
    *replaced_addresses_p = false;

  get_stmt_operands (stmt);

  FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      tree tuse = USE_FROM_PTR (use);
      value *val = get_value (tuse);

      if (val->lattice_val != CONSTANT)
        continue;

      if (TREE_CODE (stmt) == ASM_EXPR
          && !may_propagate_copy_into_asm (tuse))
        continue;

      SET_USE (use, val->const_val);

      replaced = true;
      if (POINTER_TYPE_P (TREE_TYPE (tuse)) && replaced_addresses_p)
        *replaced_addresses_p = true;
    }

  return replaced;
}
/* Replace the VUSE references in statement STMT with its immediate reaching
   definition.  Return true if the reference was replaced.  If
   REPLACED_ADDRESSES_P is given, it will be set to true if an address
   constant was replaced.  */

static bool
replace_vuse_in (tree stmt, bool *replaced_addresses_p)
{
  bool replaced = false;
  vuse_optype vuses;
  use_operand_p vuse;
  value *val;

  if (replaced_addresses_p)
    *replaced_addresses_p = false;

  get_stmt_operands (stmt);

  vuses = STMT_VUSE_OPS (stmt);

  if (NUM_VUSES (vuses) != 1)
    return false;

  vuse = VUSE_OP_PTR (vuses, 0);
  val = get_value (USE_FROM_PTR (vuse));

  if (val->lattice_val == CONSTANT
      && TREE_CODE (stmt) == MODIFY_EXPR
      && DECL_P (TREE_OPERAND (stmt, 1))
      && TREE_OPERAND (stmt, 1) == SSA_NAME_VAR (USE_FROM_PTR (vuse)))
    {
      TREE_OPERAND (stmt, 1) = val->const_val;
      replaced = true;
      if (POINTER_TYPE_P (TREE_TYPE (USE_FROM_PTR (vuse)))
          && replaced_addresses_p)
        *replaced_addresses_p = true;
    }

  return replaced;
}
/* Perform final substitution and folding.  After this pass the program
   should still be in SSA form.  */

static void
substitute_and_fold (void)
{
  basic_block bb;
  unsigned int i;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file,
             "\nSubstituting constants and folding statements\n\n");

  /* Substitute constants in every statement of every basic block.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      tree phi;

      /* Propagate our known constants into PHI nodes.  */
      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
        {
          int i;

          for (i = 0; i < PHI_NUM_ARGS (phi); i++)
            {
              value *new_val;
              use_operand_p orig_p = PHI_ARG_DEF_PTR (phi, i);
              tree orig = USE_FROM_PTR (orig_p);

              if (! SSA_VAR_P (orig))
                break;

              new_val = get_value (orig);
              if (new_val->lattice_val == CONSTANT
                  && may_propagate_copy (orig, new_val->const_val))
                SET_USE (orig_p, new_val->const_val);
            }
        }

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          bool replaced_address;
          tree stmt = bsi_stmt (i);

          /* Skip statements that have been folded already.  */
          if (stmt_modified_p (stmt) || !is_exec_stmt (stmt))
            continue;

          /* Replace the statement with its folded version and mark it
             folded.  */
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Line %d: replaced ", get_lineno (stmt));
              print_generic_stmt (dump_file, stmt, TDF_SLIM);
            }

          if (replace_uses_in (stmt, &replaced_address)
              || replace_vuse_in (stmt, &replaced_address))
            {
              bool changed = fold_stmt (bsi_stmt_ptr (i));
              stmt = bsi_stmt (i);

              /* If we folded a builtin function, we'll likely
                 need to rename VDEFs.  */
              if (replaced_address || changed)
                mark_new_vars_to_rename (stmt, vars_to_rename);

              /* If we cleaned up EH information from the statement,
                 remove EH edges.  */
              if (maybe_clean_eh_stmt (stmt))
                tree_purge_dead_eh_edges (bb);

              modify_stmt (stmt);
            }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, " with ");
              print_generic_stmt (dump_file, stmt, TDF_SLIM);
              fprintf (dump_file, "\n");
            }
        }
    }

  /* And transfer what we learned from VALUE_VECTOR into the
     SSA_NAMEs themselves.  This probably isn't terribly important
     since we probably constant propagated the values to their
     use sites above.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      value *value;

      if (!name)
        continue;

      value = get_value (name);
      if (value->lattice_val == CONSTANT
          && is_gimple_reg (name)
          && is_gimple_min_invariant (value->const_val))
        SSA_NAME_VALUE (name) = value->const_val;
    }
}
/* Free allocated storage.  */

static void
ccp_finalize (void)
{
  /* Perform substitutions based on the known constant values.  */
  substitute_and_fold ();

  free (value_vector);
}
/* Compute the meet operator between VAL1 and VAL2:

              any M UNDEFINED   = any
              any M VARYING     = VARYING
              any M UNKNOWN_VAL = UNKNOWN_VAL
              Ci  M Cj          = Ci       if (i == j)
              Ci  M Cj          = VARYING  if (i != j)  */

static value
ccp_lattice_meet (value val1, value val2)
{
  value result;

  /* any M UNDEFINED = any.  */
  if (val1.lattice_val == UNDEFINED)
    return val2;
  else if (val2.lattice_val == UNDEFINED)
    return val1;

  /* any M VARYING = VARYING.  */
  if (val1.lattice_val == VARYING || val2.lattice_val == VARYING)
    {
      result.lattice_val = VARYING;
      result.const_val = NULL_TREE;
      return result;
    }

  /* any M UNKNOWN_VAL = UNKNOWN_VAL.  */
  if (val1.lattice_val == UNKNOWN_VAL
      || val2.lattice_val == UNKNOWN_VAL)
    {
      result.lattice_val = UNKNOWN_VAL;
      result.const_val = NULL_TREE;
      return result;
    }

  /* Ci M Cj = Ci       if (i == j)
     Ci M Cj = VARYING  if (i != j)  */
  if (simple_cst_equal (val1.const_val, val2.const_val) == 1)
    {
      result.lattice_val = CONSTANT;
      result.const_val = val1.const_val;
    }
  else
    {
      result.lattice_val = VARYING;
      result.const_val = NULL_TREE;
    }

  return result;
}
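
/* For illustration only (not part of the original pass), a few concrete
   meets:

       UNDEFINED  M CONSTANT 4  = CONSTANT 4
       CONSTANT 4 M CONSTANT 4  = CONSTANT 4
       CONSTANT 4 M CONSTANT 5  = VARYING
       CONSTANT 4 M VARYING     = VARYING
       CONSTANT 4 M UNKNOWN_VAL = UNKNOWN_VAL

   Note the asymmetry between UNDEFINED (optimistic: "no information yet,
   assume the other side") and VARYING (pessimistic: "could be
   anything").  */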
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined calling ccp_lattice_meet() with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (tree phi)
{
  value new_val, *old_val;
  int i;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_generic_expr (dump_file, phi, dump_flags);
    }

  old_val = get_value (PHI_RESULT (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_NOT_INTERESTING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNKNOWN_VAL:
      /* To avoid the default value of UNKNOWN_VAL overriding
         that of its possible constant arguments, temporarily
         set the PHI node's default lattice value to be
         UNDEFINED.  If the PHI node's old value was UNKNOWN_VAL and
         the new value is UNDEFINED, then we prevent the invalid
         transition by not calling set_lattice_value.  */
      new_val.lattice_val = UNDEFINED;
      new_val.const_val = NULL_TREE;
      break;

    case UNDEFINED:
    case UNINITIALIZED:
      new_val.lattice_val = UNDEFINED;
      new_val.const_val = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments.  */
      edge e = PHI_ARG_EDGE (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file,
                   "\n    Argument #%d (%d -> %d %sexecutable)\n",
                   i, e->src->index, e->dest->index,
                   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
        }

      /* If the incoming edge is executable, compute the meet operator for
         the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
        {
          tree rdef = PHI_ARG_DEF (phi, i);
          value *rdef_val, val;

          if (is_gimple_min_invariant (rdef))
            {
              val.lattice_val = CONSTANT;
              val.const_val = rdef;
              rdef_val = &val;
            }
          else
            rdef_val = get_value (rdef);

          new_val = ccp_lattice_meet (new_val, *rdef_val);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "\t");
              print_generic_expr (dump_file, rdef, dump_flags);
              dump_lattice_value (dump_file, "\tValue: ", *rdef_val);
              fprintf (dump_file, "\n");
            }

          if (new_val.lattice_val == VARYING)
            break;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Check for an invalid change from UNKNOWN_VAL to UNDEFINED.  */
  if (old_val->lattice_val == UNKNOWN_VAL
      && new_val.lattice_val == UNDEFINED)
    return SSA_PROP_NOT_INTERESTING;

  /* Otherwise, make the transition to the new value.  */
  if (set_lattice_value (PHI_RESULT (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
        return SSA_PROP_VARYING;
      else
        return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
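
/* For illustration only (not part of the original pass): given

       if (0)
         x_1 = 3;
       else
         x_2 = 4;
       x_3 = PHI <x_1, x_2>;

   the edge carrying x_1 is never marked EDGE_EXECUTABLE, so the meet is
   computed over x_2 alone and x_3 becomes CONSTANT 4.  This filtering on
   executable edges is the "conditional" half of conditional constant
   propagation.  */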
/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS.  */

static tree
ccp_fold (tree stmt)
{
  tree rhs = get_rhs (stmt);
  enum tree_code code = TREE_CODE (rhs);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree retval = NULL_TREE;
  vuse_optype vuses;

  vuses = STMT_VUSE_OPS (stmt);

  /* If the RHS is just a variable, then that variable must now have
     a constant value that we can return directly.  */
  if (TREE_CODE (rhs) == SSA_NAME)
    return get_value (rhs)->const_val;
  else if (DECL_P (rhs)
           && NUM_VUSES (vuses) == 1
           && rhs == SSA_NAME_VAR (VUSE_OP (vuses, 0)))
    return get_value (VUSE_OP (vuses, 0))->const_val;

  /* Unary operators.  Note that we know the single operand must
     be a constant.  So this should almost always return a
     simplified RHS.  */
  if (kind == tcc_unary)
    {
      /* Handle unary operators which can appear in GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);

      /* Simplify the operand down to a constant.  */
      if (TREE_CODE (op0) == SSA_NAME)
        {
          value *val = get_value (op0);
          if (val->lattice_val == CONSTANT)
            op0 = get_value (op0)->const_val;
        }

      retval = fold_unary_to_constant (code, TREE_TYPE (rhs), op0);

      /* If we folded, but did not create an invariant, then we can not
         use this expression.  */
      if (retval && ! is_gimple_min_invariant (retval))
        return NULL;

      /* If we could not fold the expression, but the arguments are all
         constants and gimple values, then build and return the new
         expression.

         In some cases the new expression is still something we can
         use as a replacement for an argument.  This happens with
         NOP conversions of types for example.

         In other cases the new expression can not be used as a
         replacement for an argument (as it would create non-gimple
         code).  But the new expression can still be used to derive
         other constants.  */
      if (! retval && is_gimple_min_invariant (op0))
        return build1 (code, TREE_TYPE (rhs), op0);
    }

  /* Binary and comparison operators.  We know one or both of the
     operands are constants.  */
  else if (kind == tcc_binary
           || kind == tcc_comparison
           || code == TRUTH_AND_EXPR
           || code == TRUTH_OR_EXPR
           || code == TRUTH_XOR_EXPR)
    {
      /* Handle binary and comparison operators that can appear in
         GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);
      tree op1 = TREE_OPERAND (rhs, 1);

      /* Simplify the operands down to constants when appropriate.  */
      if (TREE_CODE (op0) == SSA_NAME)
        {
          value *val = get_value (op0);
          if (val->lattice_val == CONSTANT)
            op0 = val->const_val;
        }

      if (TREE_CODE (op1) == SSA_NAME)
        {
          value *val = get_value (op1);
          if (val->lattice_val == CONSTANT)
            op1 = val->const_val;
        }

      retval = fold_binary_to_constant (code, TREE_TYPE (rhs), op0, op1);

      /* If we folded, but did not create an invariant, then we can not
         use this expression.  */
      if (retval && ! is_gimple_min_invariant (retval))
        return NULL;

      /* If we could not fold the expression, but the arguments are all
         constants and gimple values, then build and return the new
         expression.

         In some cases the new expression is still something we can
         use as a replacement for an argument.  This happens with
         NOP conversions of types for example.

         In other cases the new expression can not be used as a
         replacement for an argument (as it would create non-gimple
         code).  But the new expression can still be used to derive
         other constants.  */
      if (! retval
          && is_gimple_min_invariant (op0)
          && is_gimple_min_invariant (op1))
        return build (code, TREE_TYPE (rhs), op0, op1);
    }

  /* We may be able to fold away calls to builtin functions if their
     arguments are constants.  */
  else if (code == CALL_EXPR
           && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR
           && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0))
               == FUNCTION_DECL)
           && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)))
    {
      use_optype uses = STMT_USE_OPS (stmt);
      if (NUM_USES (uses) != 0)
        {
          tree *orig;
          size_t i;

          /* Preserve the original values of every operand.  */
          orig = xmalloc (sizeof (tree) * NUM_USES (uses));
          for (i = 0; i < NUM_USES (uses); i++)
            orig[i] = USE_OP (uses, i);

          /* Substitute operands with their values and try to fold.  */
          replace_uses_in (stmt, NULL);
          retval = fold_builtin (rhs, false);

          /* Restore operands to their original form.  */
          for (i = 0; i < NUM_USES (uses); i++)
            SET_USE_OP (uses, i, orig[i]);
          free (orig);
        }
    }
  else
    return rhs;

  /* If we got a simplified form, see if we need to convert its type.  */
  if (retval)
    return fold_convert (TREE_TYPE (rhs), retval);

  /* No simplification was possible.  */
  return rhs;
}
/* Evaluate statement STMT.  */

static value
evaluate_stmt (tree stmt)
{
  value val;
  tree simplified;
  latticevalue likelyvalue = likely_value (stmt);

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    simplified = get_rhs (stmt);
  /* Otherwise the statement is likely to have an UNDEFINED value and
     there will be nothing to do.  */
  else
    simplified = NULL_TREE;

  if (simplified && is_gimple_min_invariant (simplified))
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.const_val = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
         had undefined or virtual operands, then the result of the
         statement should be undefined or virtual respectively.
         Else the result of the statement is VARYING.  */
      val.lattice_val = (likelyvalue == UNDEFINED ? UNDEFINED : VARYING);
      val.lattice_val = (likelyvalue == UNKNOWN_VAL
                         ? UNKNOWN_VAL : val.lattice_val);
      val.const_val = NULL_TREE;
    }

  return val;
}
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  */

static enum ssa_prop_result
visit_assignment (tree stmt, tree *output_p)
{
  value val;
  tree lhs, rhs;
  vuse_optype vuses;
  v_must_def_optype v_must_defs;

  lhs = TREE_OPERAND (stmt, 0);
  rhs = TREE_OPERAND (stmt, 1);
  vuses = STMT_VUSE_OPS (stmt);
  v_must_defs = STMT_V_MUST_DEF_OPS (stmt);

  gcc_assert (NUM_V_MAY_DEFS (STMT_V_MAY_DEF_OPS (stmt)) == 0);
  gcc_assert (NUM_V_MUST_DEFS (v_must_defs) == 1
              || TREE_CODE (lhs) == SSA_NAME);

  /* We require the SSA version number of the lhs for the value_vector.
     Make sure we have it.  */
  if (TREE_CODE (lhs) != SSA_NAME)
    {
      /* If we make it here, then stmt only has one definition:
         a V_MUST_DEF.  */
      lhs = V_MUST_DEF_RESULT (v_must_defs, 0);
    }

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      /* For a simple copy operation, we copy the lattice values.  */
      value *nval = get_value (rhs);
      val = *nval;
    }
  else if (DECL_P (rhs)
           && NUM_VUSES (vuses) == 1
           && rhs == SSA_NAME_VAR (VUSE_OP (vuses, 0)))
    {
      /* Same as above, but the rhs is not a gimple register and yet
         has a known VUSE.  */
      value *nval = get_value (VUSE_OP (vuses, 0));
      val = *nval;
    }
  else
    /* Evaluate the statement.  */
    val = evaluate_stmt (stmt);

  /* If the original LHS was a VIEW_CONVERT_EXPR, modify the constant
     value to be a VIEW_CONVERT_EXPR of the old constant value.

     ??? Also, if this was a definition of a bitfield, we need to widen
     the constant value into the type of the destination variable.  This
     should not be necessary if GCC represented bitfields properly.  */
  {
    tree orig_lhs = TREE_OPERAND (stmt, 0);

    if (TREE_CODE (orig_lhs) == VIEW_CONVERT_EXPR
        && val.lattice_val == CONSTANT)
      {
        tree w = fold (build1 (VIEW_CONVERT_EXPR,
                               TREE_TYPE (TREE_OPERAND (orig_lhs, 0)),
                               val.const_val));

        orig_lhs = TREE_OPERAND (orig_lhs, 1);
        if (w && is_gimple_min_invariant (w))
          val.const_val = w;
        else
          {
            val.lattice_val = VARYING;
            val.const_val = NULL;
          }
      }

    if (val.lattice_val == CONSTANT
        && TREE_CODE (orig_lhs) == COMPONENT_REF
        && DECL_BIT_FIELD (TREE_OPERAND (orig_lhs, 1)))
      {
        tree w = widen_bitfield (val.const_val, TREE_OPERAND (orig_lhs, 1),
                                 orig_lhs);

        if (w && is_gimple_min_invariant (w))
          val.const_val = w;
        else
          {
            val.lattice_val = VARYING;
            val.const_val = NULL;
          }
      }
  }

  /* If LHS is not a gimple register, then it cannot take on an
     UNDEFINED value.  */
  if (!is_gimple_reg (SSA_NAME_VAR (lhs))
      && val.lattice_val == UNDEFINED)
    val.lattice_val = UNKNOWN_VAL;

  /* Set the lattice value of the statement's output.  */
  if (set_lattice_value (lhs, val))
    {
      *output_p = lhs;
      if (val.lattice_val == VARYING)
        return SSA_PROP_VARYING;
      else
        return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (tree stmt, edge *taken_edge_p)
{
  value val;
  basic_block block;

  block = bb_for_stmt (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.const_val ? find_taken_edge (block, val.const_val) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (tree stmt, edge *taken_edge_p, tree *output_p)
{
  stmt_ann_t ann;
  v_may_def_optype v_may_defs;
  v_must_def_optype v_must_defs;
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement: ");
      print_generic_stmt (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  ann = stmt_ann (stmt);

  v_must_defs = V_MUST_DEF_OPS (ann);
  v_may_defs = V_MAY_DEF_OPS (ann);
  if (TREE_CODE (stmt) == MODIFY_EXPR
      && NUM_V_MAY_DEFS (v_may_defs) == 0
      && (NUM_V_MUST_DEFS (v_must_defs) == 1
          || TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME))
    {
      /* If the statement is an assignment that produces a single
         output value, evaluate its RHS to see if the lattice value of
         its output has changed.  */
      return visit_assignment (stmt, output_p);
    }
  else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR)
    {
      /* If STMT is a conditional branch, see if we can determine
         which branch will be taken.  */
      return visit_cond_stmt (stmt, taken_edge_p);
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
    def_to_varying (def);

  /* Mark all V_MAY_DEF operands VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_VMAYDEF)
    def_to_varying (def);

  return SSA_PROP_VARYING;
}
/* Main entry point for SSA Conditional Constant Propagation.

   [ DESCRIBE MAIN ALGORITHM HERE ]  */

static void
execute_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  ccp_finalize ();
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}
struct tree_opt_pass pass_ccp =
{
  "ccp",				/* name */
  gate_ccp,				/* gate */
  execute_ssa_ccp,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_cleanup_cfg | TODO_dump_func | TODO_rename_vars
    | TODO_ggc_collect | TODO_verify_ssa
    | TODO_verify_stmts,		/* todo_flags_finish */
  0					/* letter */
};
/* Given a constant value VAL for bitfield FIELD, and a destination
   variable VAR, return VAL appropriately widened to fit into VAR.  If
   FIELD is wider than HOST_WIDE_INT, NULL is returned.  */

tree
widen_bitfield (tree val, tree field, tree var)
{
  unsigned HOST_WIDE_INT var_size, field_size;
  tree wide_val;
  unsigned HOST_WIDE_INT mask;
  unsigned int i;

  /* We can only do this if the size of the type and field and VAL are
     all constants representable in HOST_WIDE_INT.  */
  if (!host_integerp (TYPE_SIZE (TREE_TYPE (var)), 1)
      || !host_integerp (DECL_SIZE (field), 1)
      || !host_integerp (val, 0))
    return NULL_TREE;

  var_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1);
  field_size = tree_low_cst (DECL_SIZE (field), 1);

  /* Give up if either the bitfield or the variable are too wide.  */
  if (field_size > HOST_BITS_PER_WIDE_INT || var_size > HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;

  gcc_assert (var_size >= field_size);

  /* If the sign bit of the value is not set or the field's type is unsigned,
     just mask off the high order bits of the value.  */
  if (DECL_UNSIGNED (field)
      || !(tree_low_cst (val, 0) & (((HOST_WIDE_INT) 1) << (field_size - 1))))
    {
      /* Zero extension.  Build a mask with the lower 'field_size' bits
         set and a BIT_AND_EXPR node to clear the high order bits of
         the value.  */
      for (i = 0, mask = 0; i < field_size; i++)
        mask |= ((HOST_WIDE_INT) 1) << i;

      wide_val = build2 (BIT_AND_EXPR, TREE_TYPE (var), val,
                         build_int_cst (TREE_TYPE (var), mask));
    }
  else
    {
      /* Sign extension.  Create a mask with the upper 'field_size'
         bits set and a BIT_IOR_EXPR to set the high order bits of the
         value.  */
      for (i = 0, mask = 0; i < (var_size - field_size); i++)
        mask |= ((HOST_WIDE_INT) 1) << (var_size - i - 1);

      wide_val = build2 (BIT_IOR_EXPR, TREE_TYPE (var), val,
                         build_int_cst (TREE_TYPE (var), mask));
    }

  return fold (wide_val);
}
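
/* Worked example for the masks above (illustration only, not part of the
   original pass): storing the constant -1 into a 3-bit signed bitfield
   held in a 32-bit variable gives field_size == 3 and var_size == 32.
   The sign bit of the value (bit 2) is set, so we take the
   sign-extension branch, and the loop builds a mask with the upper
   32 - 3 = 29 bits set:

       mask = 0xfffffff8

   so VAL | 0xfffffff8 reproduces -1 in the full 32-bit type.  For an
   unsigned 3-bit field the other branch would instead mask with 0x7.  */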
/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type)
{
  tree min_idx, idx, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of elements type) from that ARRAY_REF).
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
          || TREE_CODE (elt_offset) != INTEGER_CST)
        return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!lang_hooks.types_compatible_p (orig_type, elt_type))
    return NULL_TREE;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
        elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = integer_zero_node;
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;

      if (TREE_CODE (elt_size) != INTEGER_CST
          || div_and_round_double (TRUNC_DIV_EXPR, 1,
                                   TREE_INT_CST_LOW (offset),
                                   TREE_INT_CST_HIGH (offset),
                                   TREE_INT_CST_LOW (elt_size),
                                   TREE_INT_CST_HIGH (elt_size),
                                   &lquo, &hquo, &lrem, &hrem)
          || lrem || hrem)
        return NULL_TREE;

      idx = build_int_cst_wide (NULL_TREE, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = integer_zero_node;
  if (TYPE_DOMAIN (array_type))
    {
      if (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
        min_idx = TYPE_MIN_VALUE (TYPE_DOMAIN (array_type));
      else
        min_idx = fold_convert (TYPE_DOMAIN (array_type), min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
        return NULL_TREE;

      idx = fold_convert (TYPE_DOMAIN (array_type), idx);
      elt_offset = fold_convert (TYPE_DOMAIN (array_type), elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  return build (ARRAY_REF, orig_type, base, idx, min_idx,
                size_int (tree_low_cst (elt_size, 1)
                          / (TYPE_ALIGN_UNIT (elt_type))));
}
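
/* For illustration only (not part of the original pass): with
   'int a[10]' (so elt_size == 4), the expression *(&a[1] + 8) arrives
   here with BASE == a[1] and OFFSET == 8.  The ARRAY_REF contributes
   elt_offset == 1, the byte offset divides exactly to idx == 2, and the
   result is a[1 + 2], i.e. a[3].  An offset of 6 would leave a nonzero
   remainder and the function would return NULL_TREE.  */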
/* A subroutine of fold_stmt_r.  Attempts to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
/* ??? This doesn't handle class inheritance.  */

static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
                                    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field, field_offset;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (lang_hooks.types_compatible_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
        continue;
      if (DECL_BIT_FIELD (f))
        continue;

      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
        continue;

      /* ??? Java creates "interesting" fields for representing base classes.
         They have no name, and have no context.  With no context, we get into
         trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
        continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
        continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
         then we can return that field.  */
      if (cmp == 0
          && lang_hooks.types_compatible_p (orig_type, field_type))
        {
          if (base_is_ptr)
            base = build1 (INDIRECT_REF, record_type, base);
          t = build (COMPONENT_REF, field_type, base, f, NULL_TREE);
          return t;
        }

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
        continue;

      /* Check for array at the end of the struct.  This is often
         used for flexible array members.  We should be able to
         turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
        tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
          || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
        continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
        continue;

      /* If we matched, then set offset to the displacement into
         this field.  */
      offset = t;
      goto found;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

 found:
  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type);
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
                                             orig_type, false);
}
/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a PLUS_EXPR.  */
  if (TREE_CODE (base) == PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = int_const_binop (PLUS_EXPR, offset, offset2, 1);
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
          && ccp_decl_initial_min_invariant (DECL_INITIAL (base)))
        return DECL_INITIAL (base);

      /* Try folding *(&B+O) to B[X].  */
      t = maybe_fold_offset_to_array_ref (base, offset, TREE_TYPE (expr));
      if (t)
        return t;

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_component_ref (TREE_TYPE (base), base, offset,
                                              TREE_TYPE (expr), false);
      if (t)
        return t;

      /* Fold *&B to B.  We can only do this if EXPR is the same type
         as BASE.  We can't do this if EXPR is the element type of an array
         and BASE is the array.  */
      if (integer_zerop (offset)
          && lang_hooks.types_compatible_p (TREE_TYPE (base),
                                            TREE_TYPE (expr)))
        return base;
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
         such as "_"[3].  Bail out of the entire substitution search
         and arrange for the entire statement to be replaced by a
         call to __builtin_trap.  In all likelihood this will all be
         constant-folded away, but in the meantime we can't leave with
         something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
        {
          /* FIXME: Except that this causes problems elsewhere with dead
             code not being deleted, and we abort in the rtl expanders
             because we failed to remove some ssa_name.  In the meantime,
             just return zero.  */
          /* FIXME2: This condition should be signaled by
             fold_read_from_constant_string directly, rather than
             re-checking for it here.  */
          return integer_zero_node;
        }

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
        {
          t = maybe_fold_offset_to_component_ref (TREE_TYPE (TREE_TYPE (base)),
                                                  base, offset,
                                                  TREE_TYPE (expr), true);
          if (t)
            return t;
        }
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}
/* A subroutine of fold_stmt_r.  EXPR is a PLUS_EXPR or MINUS_EXPR.

   A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
        (T *)(&array + const)
   where the cast doesn't actually exist, but is implicit in the
   type of the PLUS_EXPR.  We'd like to turn this into
        &array[x]
   which may be able to propagate further.  */

static tree
maybe_fold_stmt_addition (tree expr)
{
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  tree ptr_type = TREE_TYPE (expr);
  tree ptd_type;
  tree t;
  bool subtract = (TREE_CODE (expr) == MINUS_EXPR);

  /* We're only interested in pointer arithmetic.  */
  if (!POINTER_TYPE_P (ptr_type))
    return NULL_TREE;
  /* Canonicalize the integral operand to op1.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (op0)))
    {
      if (subtract)
        return NULL_TREE;
      t = op0, op0 = op1, op1 = t;
    }
  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    return NULL_TREE;
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
        break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
        break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
        {
          min_idx = TYPE_MIN_VALUE (min_idx);
          if (min_idx)
            {
              if (TREE_CODE (min_idx) != INTEGER_CST)
                break;

              array_idx = convert (TREE_TYPE (min_idx), array_idx);
              if (!integer_zerop (min_idx))
                array_idx = int_const_binop (MINUS_EXPR, array_idx,
                                             min_idx, 0);
            }
        }

      /* Convert the index to a byte offset.  */
      array_idx = convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      /* If we're manipulating unsigned types, then folding into negative
         values can produce incorrect results.  Particularly if the type
         is smaller than the width of the pointer.  */
      if (subtract
          && TYPE_UNSIGNED (TREE_TYPE (op1))
          && tree_int_cst_lt (array_idx, op1))
        return NULL;
      op1 = int_const_binop (subtract ? MINUS_EXPR : PLUS_EXPR,
                             array_idx, op1, 0);
      subtract = false;
      op0 = array_obj;
    }

  /* If we weren't able to fold the subtraction into another array reference,
     canonicalize the integer for passing to the array and component ref
     simplification functions.  */
  if (subtract)
    {
      if (TYPE_UNSIGNED (TREE_TYPE (op1)))
        return NULL;
      op1 = fold (build1 (NEGATE_EXPR, TREE_TYPE (op1), op1));
      /* ??? In theory fold should always produce another integer.  */
      if (TREE_CODE (op1) != INTEGER_CST)
        return NULL;
    }

  ptd_type = TREE_TYPE (ptr_type);

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
                                            ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, ptr_type, t);

  return t;
}
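
/* For illustration only (not part of the original pass): given
   'int a[10]', the GIMPLE expression

       (int *) &a + 8

   reaches this function with op0 == &a and op1 == 8.  Dividing the byte
   offset by the element size in maybe_fold_offset_to_array_ref yields
   index 2, so the whole expression becomes &a[2], which later passes
   may be able to propagate further.  */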
/* Subroutine of fold_stmt called via walk_tree.  We perform several
   simplifications of EXPR_P, mostly having to do with pointer arithmetic.  */

static tree
fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
{
  bool *changed_p = data;
  tree expr = *expr_p, t;

  /* ??? It'd be nice if walk_tree had a pre-order option.  */
  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
                                    integer_zero_node);
      break;

      /* ??? Could handle ARRAY_REF here, as a variant of INDIRECT_REF.
         We'd only want to bother decomposing an existing ARRAY_REF if
         the base array is found to have another offset contained within.
         Otherwise we'd be wasting time.  */

    case ADDR_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Set TREE_INVARIANT properly so that the value is properly
         considered constant, and so gets propagated as expected.  */
      if (*changed_p)
        recompute_tree_invarant_for_addr_expr (expr);
      return NULL_TREE;

    case PLUS_EXPR:
    case MINUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (expr);
      break;

    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
         We've already checked that the records are compatible, so we should
         come up with a set of compatible fields.  */
      {
        tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
        tree expr_field = TREE_OPERAND (expr, 1);

        if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
          {
            expr_field = find_compatible_field (expr_record, expr_field);
            TREE_OPERAND (expr, 1) = expr_field;
          }
      }
      break;

    default:
      return NULL_TREE;
    }

  if (t)
    {
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}
/* Return the string length of ARG in LENGTH.  If ARG is an SSA name variable,
   follow its use-def chains.  If LENGTH is not NULL and its value is not
   equal to the length we determine, or if we are unable to determine the
   length, return false.  VISITED is a bitmap of visited variables.  */

static bool
get_strlen (tree arg, tree *length, bitmap visited)
{
  tree var, def_stmt, val;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length && simple_cst_equal (val, *length) != 1)
        return false;

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (TREE_CODE (def_stmt))
    {
      case MODIFY_EXPR:
        {
          tree len, rhs;

          /* The RHS of the statement defining VAR must either have a
             constant length or come from another SSA_NAME with a constant
             length.  */
          rhs = TREE_OPERAND (def_stmt, 1);
          STRIP_NOPS (rhs);
          if (TREE_CODE (rhs) == SSA_NAME)
            return get_strlen (rhs, length, visited);

          /* See if the RHS is a constant length.  */
          len = c_strlen (rhs, 1);
          if (len)
            {
              if (*length && simple_cst_equal (len, *length) != 1)
                return false;

              *length = len;
              return true;
            }

          break;
        }

      case PHI_NODE:
        {
          /* All the arguments of the PHI node must have the same constant
             length.  */
          int i;

          for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
            {
              tree arg = PHI_ARG_DEF (def_stmt, i);

              /* If this PHI has itself as an argument, we cannot
                 determine the string length of this argument.  However,
                 if we can find a constant string length for the other
                 PHI args then we can still be sure that this is a
                 constant string length.  So be optimistic and just
                 continue with the next argument.  */
              if (arg == PHI_RESULT (def_stmt))
                continue;

              if (!get_strlen (arg, length, visited))
                return false;
            }

          return true;
        }

      default:
        break;
    }

  return false;
}
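
/* For illustration only (not part of the original pass): in

       s_1 = "abc";
       ...
       s_2 = "xyz";
       ...
       s_3 = PHI <s_1, s_2>;
       n_4 = strlen (s_3);

   both PHI arguments have length 3, so get_strlen returns 3 for s_3 and
   the strlen call can be folded.  Had the literals differed in length,
   the PHI case above would have returned false.  */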
/* Fold builtin call FN in statement STMT.  If it cannot be folded into a
   constant, return NULL_TREE.  Otherwise, return its constant value.  */

static tree
ccp_fold_builtin (tree stmt, tree fn)
{
  tree result, strlen_val[2];
  tree callee, arglist, a;
  int strlen_arg, i;
  bitmap visited;
  bool ignore;

  ignore = TREE_CODE (stmt) != MODIFY_EXPR;

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_builtin (fn, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = get_callee_fndecl (fn);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  arglist = TREE_OPERAND (fn, 1);
  if (!arglist)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      strlen_arg = 1;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      strlen_arg = 2;
      break;
    default:
      return NULL_TREE;
    }

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);

  memset (strlen_val, 0, sizeof (strlen_val));
  for (i = 0, a = arglist;
       strlen_arg;
       i++, strlen_arg >>= 1, a = TREE_CHAIN (a))
    if (strlen_arg & 1)
      {
        bitmap_clear (visited);
        if (!get_strlen (TREE_VALUE (a), &strlen_val[i], visited))
          strlen_val[i] = NULL_TREE;
      }

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (strlen_val[0])
        {
          tree new = fold_convert (TREE_TYPE (fn), strlen_val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we can not use the result.  */
          if (is_gimple_val (new)
              || (is_gimple_cast (new)
                  && is_gimple_val (TREE_OPERAND (new, 0))))
            return new;
        }
      break;

    case BUILT_IN_STRCPY:
      if (strlen_val[1] && is_gimple_val (strlen_val[1]))
        result = fold_builtin_strcpy (fn, strlen_val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (strlen_val[1] && is_gimple_val (strlen_val[1]))
        result = fold_builtin_strncpy (fn, strlen_val[1]);
      break;

    case BUILT_IN_FPUTS:
      result = fold_builtin_fputs (arglist,
                                   TREE_CODE (stmt) != MODIFY_EXPR, 0,
                                   strlen_val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      result = fold_builtin_fputs (arglist,
                                   TREE_CODE (stmt) != MODIFY_EXPR, 1,
                                   strlen_val[0]);
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
/* Fold the statement pointed by STMT_P.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */

bool
fold_stmt (tree *stmt_p)
{
  tree rhs, result, stmt;
  bool changed = false;

  stmt = *stmt_p;

  /* If we replaced constants and the statement makes pointer dereferences,
     then we may need to fold instances of *&VAR into VAR, etc.  */
  if (walk_tree (stmt_p, fold_stmt_r, &changed, NULL))
    {
      *stmt_p
        = build_function_call_expr (implicit_built_in_decls[BUILT_IN_TRAP],
                                    NULL);
      return true;
    }

  rhs = get_rhs (stmt);
  if (!rhs)
    return changed;
  result = NULL_TREE;

  if (TREE_CODE (rhs) == CALL_EXPR)
    {
      tree callee;

      /* Check for builtins that CCP can handle using information not
         available in the generic fold routines.  */
      callee = get_callee_fndecl (rhs);
      if (callee && DECL_BUILT_IN (callee))
        result = ccp_fold_builtin (stmt, rhs);
      else
        {
          /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
             here are when we've propagated the address of a decl into the
             object slot.  */
          /* ??? Should perhaps do this in fold proper.  However, doing it
             there requires that we create a new CALL_EXPR, and that requires
             copying EH region info to the new node.  Easier to just do it
             here where we can just smash the call operand.  */
          callee = TREE_OPERAND (rhs, 0);
          if (TREE_CODE (callee) == OBJ_TYPE_REF
              && lang_hooks.fold_obj_type_ref
              && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
              && DECL_P (TREE_OPERAND
                         (OBJ_TYPE_REF_OBJECT (callee), 0)))
            {
              tree t;

              /* ??? Caution: Broken ADDR_EXPR semantics means that
                 looking at the type of the operand of the addr_expr
                 can yield an array type.  See silly exception in
                 check_pointer_types_r.  */

              t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
              t = lang_hooks.fold_obj_type_ref (callee, t);
              if (t)
                {
                  TREE_OPERAND (rhs, 0) = t;
                  changed = true;
                }
            }
        }
    }

  /* If we couldn't fold the RHS, hand over to the generic fold routines.  */
  if (result == NULL_TREE)
    result = fold (rhs);

  /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR that
     may have been added by fold, and "useless" type conversions that might
     now be apparent due to propagation.  */
  STRIP_USELESS_TYPE_CONVERSION (result);

  if (result != rhs)
    changed |= set_rhs (stmt_p, result);

  return changed;
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  */

static tree
convert_to_gimple_builtin (block_stmt_iterator *si_p, tree expr)
{
  tree_stmt_iterator ti;
  tree stmt = bsi_stmt (*si_p);
  tree tmp, stmts = NULL;

  push_gimplify_context ();
  tmp = get_initialized_tmp_var (expr, &stmts, NULL);
  pop_gimplify_context (NULL);

  /* The replacement can expose previously unreferenced variables.  */
  for (ti = tsi_start (stmts); !tsi_end_p (ti); tsi_next (&ti))
    {
      find_new_referenced_vars (tsi_stmt_ptr (ti));
      mark_new_vars_to_rename (tsi_stmt (ti), vars_to_rename);
    }

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&stmts, EXPR_LOCATION (stmt));

  bsi_insert_before (si_p, stmts, BSI_SAME_STMT);

  return tmp;
}
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static void
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          tree *stmtp = bsi_stmt_ptr (i);
          tree call = get_rhs (*stmtp);
          tree callee, result;

          if (!call || TREE_CODE (call) != CALL_EXPR)
            continue;
          callee = get_callee_fndecl (call);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            continue;

          result = ccp_fold_builtin (*stmtp, call);
          if (!result)
            switch (DECL_FUNCTION_CODE (callee))
              {
              case BUILT_IN_CONSTANT_P:
                /* Resolve __builtin_constant_p.  If it hasn't been
                   folded to integer_one_node by now, it's fairly
                   certain that the value simply isn't constant.  */
                result = integer_zero_node;
                break;

              default:
                continue;
              }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_generic_stmt (dump_file, *stmtp, dump_flags);
            }

          if (!set_rhs (stmtp, result))
            {
              result = convert_to_gimple_builtin (&i, result);
              if (result && !set_rhs (stmtp, result))
                abort ();
            }
          modify_stmt (*stmtp);
          if (maybe_clean_eh_stmt (*stmtp)
              && tree_purge_dead_eh_edges (bb))
            cfg_changed = true;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_generic_stmt (dump_file, *stmtp, dump_flags);
              fprintf (dump_file, "\n");
            }
        }
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    cleanup_tree_cfg ();
}
struct tree_opt_pass pass_fold_builtins =
{
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_rename_vars,			/* todo_flags_finish */
  0					/* letter */
};