/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* Conditional constant propagation.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED = 0,
  UNDEFINED,
  UNKNOWN_VAL,
  CONSTANT,
  VARYING
} latticevalue;

/* Main structure for CCP.  Contains the lattice value and, if it's a
   constant, the constant value.  */
typedef struct
{
  latticevalue lattice_val;
  tree const_val;
} value;

/* This is used to track the current value of each variable.  */
static value *value_vector;
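/* Illustrative note (not part of the original sources): the lattice is
   ordered UNDEFINED < { UNKNOWN_VAL, CONSTANT } < VARYING, and a name
   may only move upwards while the propagator runs.  For example, an SSA
   name first seen as UNDEFINED may become CONSTANT 5 once a definition
   x_1 = 5 is simulated, and later become VARYING if a disagreeing
   constant reaches it along another executable path.  set_lattice_value
   below asserts that downward transitions such as CONSTANT -> UNDEFINED
   never happen.  */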
/* Dump lattice value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, value val)
{
  switch (val.lattice_val)
    {
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case UNKNOWN_VAL:
      fprintf (outf, "%sUNKNOWN_VAL", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.const_val, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}
/* Return a default value for variable VAR using the following rules:

   1- Function arguments are considered VARYING.

   2- Global and static variables that are declared constant are
      considered CONSTANT.

   3- Any other virtually defined variable is considered UNKNOWN_VAL.

   4- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.  */
static value
get_default_value (tree var)
{
  value val;
  tree sym;

  if (TREE_CODE (var) == SSA_NAME)
    sym = SSA_NAME_VAR (var);
  else
    {
      gcc_assert (DECL_P (var));
      sym = var;
    }

  val.lattice_val = UNDEFINED;
  val.const_val = NULL_TREE;

  if (TREE_CODE (var) == SSA_NAME
      && SSA_NAME_VALUE (var)
      && is_gimple_min_invariant (SSA_NAME_VALUE (var)))
    {
      val.lattice_val = CONSTANT;
      val.const_val = SSA_NAME_VALUE (var);
    }
  else if (TREE_CODE (sym) == PARM_DECL || TREE_THIS_VOLATILE (sym))
    {
      /* Function arguments and volatile variables are considered VARYING.  */
      val.lattice_val = VARYING;
    }
  else if (TREE_STATIC (sym))
    {
      /* Globals and static variables are considered UNKNOWN_VAL,
         unless they are declared 'const'.  */
      if (TREE_READONLY (sym)
          && DECL_INITIAL (sym)
          && is_gimple_min_invariant (DECL_INITIAL (sym)))
        {
          val.lattice_val = CONSTANT;
          val.const_val = DECL_INITIAL (sym);
        }
      else
        {
          val.const_val = NULL_TREE;
          val.lattice_val = UNKNOWN_VAL;
        }
    }
  else if (!is_gimple_reg (sym))
    {
      val.const_val = NULL_TREE;
      val.lattice_val = UNKNOWN_VAL;
    }
  else
    {
      enum tree_code code;
      tree stmt = SSA_NAME_DEF_STMT (var);

      if (!IS_EMPTY_STMT (stmt))
        {
          code = TREE_CODE (stmt);
          if (code != MODIFY_EXPR && code != PHI_NODE)
            val.lattice_val = VARYING;
        }
    }

  return val;
}
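/* Illustrative example (not part of the original sources): given

     int foo (int n)
     {
       static const int k = 10;
       extern int g;
       int x;
       ...
     }

   the rules above give the PARM_DECL n the default VARYING, the
   read-only static k the default CONSTANT 10, the global g (a virtual
   operand) the default UNKNOWN_VAL, and the not-yet-defined register
   x the default UNDEFINED.  */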
/* Get the constant value associated with variable VAR.  */

static value *
get_value (tree var)
{
  value *val;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  val = &value_vector[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}
/* Set the lattice value for variable VAR to VAL.  Return true if VAL
   is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, value val)
{
  value *old = get_value (var);

  if (val.lattice_val == UNDEFINED)
    {
      /* CONSTANT->UNDEFINED is never a valid state transition.  */
      gcc_assert (old->lattice_val != CONSTANT);

      /* UNKNOWN_VAL->UNDEFINED is never a valid state transition.  */
      gcc_assert (old->lattice_val != UNKNOWN_VAL);

      /* VARYING->UNDEFINED is generally not a valid state transition,
         except for values which are initialized to VARYING.  */
      gcc_assert (old->lattice_val != VARYING
                  || get_default_value (var).lattice_val == VARYING);
    }
  else if (val.lattice_val == CONSTANT)
    /* VARYING -> CONSTANT is an invalid state transition, except
       for objects which start off in a VARYING state.  */
    gcc_assert (old->lattice_val != VARYING
                || get_default_value (var).lattice_val == VARYING);

  /* If the constant for VAR has changed, then this VAR is really varying.  */
  if (old->lattice_val == CONSTANT
      && val.lattice_val == CONSTANT
      && !simple_cst_equal (old->const_val, val.const_val))
    {
      val.lattice_val = VARYING;
      val.const_val = NULL_TREE;
    }

  if (old->lattice_val != val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          dump_lattice_value (dump_file, "Lattice value changed to ", val);
          fprintf (dump_file, ".  Adding definition to SSA edges.\n");
        }

      *old = val;
      return true;
    }

  return false;
}
/* Set the lattice value for the variable VAR to VARYING.  */

static void
def_to_varying (tree var)
{
  value val;
  val.lattice_val = VARYING;
  val.const_val = NULL_TREE;
  set_lattice_value (var, val);
}
/* Return the likely latticevalue for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if any operands of STMT are undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static latticevalue
likely_value (tree stmt)
{
  vuse_optype vuses;
  int found_constant = 0;
  stmt_ann_t ann;
  tree use;
  ssa_op_iter iter;

  /* If the statement makes aliased loads or has volatile operands, it
     won't fold to a constant value.  */
  ann = stmt_ann (stmt);
  if (ann->makes_aliased_loads || ann->has_volatile_ops)
    return VARYING;

  /* A CALL_EXPR is assumed to be varying.  This may be overly conservative,
     in the presence of const and pure calls.  */
  if (get_call_expr_in (stmt) != NULL_TREE)
    return VARYING;

  get_stmt_operands (stmt);

  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      value *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
        return UNDEFINED;

      if (val->lattice_val == CONSTANT)
        found_constant = 1;
    }

  vuses = VUSE_OPS (ann);

  if (NUM_VUSES (vuses))
    {
      tree vuse = VUSE_OP (vuses, 0);
      value *val = get_value (vuse);

      if (val->lattice_val == UNKNOWN_VAL)
        return UNKNOWN_VAL;

      /* There should be no VUSE operands that are UNDEFINED.  */
      gcc_assert (val->lattice_val != UNDEFINED);

      if (val->lattice_val == CONSTANT)
        found_constant = 1;
    }

  return ((found_constant || (!USE_OPS (ann) && !vuses)) ? CONSTANT : VARYING);
}
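/* Illustrative example (not part of the original sources): for the
   statement x_3 = y_1 + z_2, if y_1 is UNDEFINED the statement is
   likely UNDEFINED and nothing needs to be folded yet; if no operand
   is UNDEFINED and at least one of y_1 or z_2 is CONSTANT the
   statement is likely CONSTANT and worth folding; otherwise it is
   likely VARYING and folding is not attempted.  */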
/* Function indicating whether we ought to include information for VAR
   when calculating immediate uses.  */

static bool
need_imm_uses_for (tree var)
{
  return get_value (var)->lattice_val != VARYING;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;
  sbitmap is_may_def;

  value_vector = (value *) xmalloc (num_ssa_names * sizeof (value));
  memset (value_vector, 0, num_ssa_names * sizeof (value));

  /* Set of SSA_NAMEs that are defined by a V_MAY_DEF.  */
  is_may_def = sbitmap_alloc (num_ssa_names);
  sbitmap_zero (is_may_def);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      /* Mark all V_MAY_DEF operands VARYING.  */
      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          bool is_varying = false;
          tree stmt = bsi_stmt (i);
          ssa_op_iter iter;
          tree def;

          get_stmt_operands (stmt);

          /* Get the default value for each DEF and V_MUST_DEF.  */
          FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter,
                                     (SSA_OP_DEF | SSA_OP_VMUSTDEF))
            {
              if (get_value (def)->lattice_val == VARYING)
                is_varying = true;
            }

          /* Mark all V_MAY_DEF operands VARYING.  */
          FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_VMAYDEF)
            {
              get_value (def)->lattice_val = VARYING;
              SET_BIT (is_may_def, SSA_NAME_VERSION (def));
            }

          /* Statements other than MODIFY_EXPR, COND_EXPR and
             SWITCH_EXPR are not interesting for constant propagation.
             Mark them VARYING.  */
          if (TREE_CODE (stmt) != MODIFY_EXPR
              && TREE_CODE (stmt) != COND_EXPR
              && TREE_CODE (stmt) != SWITCH_EXPR)
            is_varying = true;

          DONT_SIMULATE_AGAIN (stmt) = is_varying;
        }
    }

  /* Now process PHI nodes.  */
  FOR_EACH_BB (bb)
    {
      tree phi, var;
      int x;

      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
        {
          value *val = get_value (PHI_RESULT (phi));

          for (x = 0; x < PHI_NUM_ARGS (phi); x++)
            {
              var = PHI_ARG_DEF (phi, x);

              /* If one argument has a V_MAY_DEF, the result is
                 VARYING.  */
              if (TREE_CODE (var) == SSA_NAME)
                {
                  if (TEST_BIT (is_may_def, SSA_NAME_VERSION (var)))
                    {
                      val->lattice_val = VARYING;
                      SET_BIT (is_may_def,
                               SSA_NAME_VERSION (PHI_RESULT (phi)));
                      break;
                    }
                }
            }

          DONT_SIMULATE_AGAIN (phi) = (val->lattice_val == VARYING);
        }
    }

  sbitmap_free (is_may_def);

  /* Compute immediate uses for variables we care about.  */
  compute_immediate_uses (TDFA_USE_OPS | TDFA_USE_VOPS, need_imm_uses_for);
}
/* Replace USE references in statement STMT with their immediate reaching
   definition.  Return true if at least one reference was replaced.  If
   REPLACED_ADDRESSES_P is given, it will be set to true if an address
   constant was replaced.  */

static bool
replace_uses_in (tree stmt, bool *replaced_addresses_p)
{
  bool replaced = false;
  use_operand_p use;
  ssa_op_iter iter;

  if (replaced_addresses_p)
    *replaced_addresses_p = false;

  get_stmt_operands (stmt);

  FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      tree tuse = USE_FROM_PTR (use);
      value *val = get_value (tuse);

      if (val->lattice_val != CONSTANT)
        continue;

      if (TREE_CODE (stmt) == ASM_EXPR
          && !may_propagate_copy_into_asm (tuse))
        continue;

      SET_USE (use, val->const_val);

      replaced = true;
      if (POINTER_TYPE_P (TREE_TYPE (tuse)) && replaced_addresses_p)
        *replaced_addresses_p = true;
    }

  return replaced;
}
/* Replace the VUSE references in statement STMT with its immediate reaching
   definition.  Return true if the reference was replaced.  If
   REPLACED_ADDRESSES_P is given, it will be set to true if an address
   constant was replaced.  */

static bool
replace_vuse_in (tree stmt, bool *replaced_addresses_p)
{
  bool replaced = false;
  vuse_optype vuses;
  use_operand_p vuse;
  value *val;

  if (replaced_addresses_p)
    *replaced_addresses_p = false;

  get_stmt_operands (stmt);

  vuses = STMT_VUSE_OPS (stmt);

  if (NUM_VUSES (vuses) != 1)
    return false;

  vuse = VUSE_OP_PTR (vuses, 0);
  val = get_value (USE_FROM_PTR (vuse));

  if (val->lattice_val == CONSTANT
      && TREE_CODE (stmt) == MODIFY_EXPR
      && DECL_P (TREE_OPERAND (stmt, 1))
      && TREE_OPERAND (stmt, 1) == SSA_NAME_VAR (USE_FROM_PTR (vuse)))
    {
      TREE_OPERAND (stmt, 1) = val->const_val;
      replaced = true;
      if (POINTER_TYPE_P (TREE_TYPE (USE_FROM_PTR (vuse)))
          && replaced_addresses_p)
        *replaced_addresses_p = true;
    }

  return replaced;
}
/* Perform final substitution and folding.  After this pass the program
   should still be in SSA form.  */

static void
substitute_and_fold (void)
{
  basic_block bb;
  unsigned int i;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file,
             "\nSubstituting constants and folding statements\n\n");

  /* Substitute constants in every statement of every basic block.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      tree phi;

      /* Propagate our known constants into PHI nodes.  */
      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
        {
          int i;

          for (i = 0; i < PHI_NUM_ARGS (phi); i++)
            {
              value *new_val;
              use_operand_p orig_p = PHI_ARG_DEF_PTR (phi, i);
              tree orig = USE_FROM_PTR (orig_p);

              if (! SSA_VAR_P (orig))
                break;

              new_val = get_value (orig);
              if (new_val->lattice_val == CONSTANT
                  && may_propagate_copy (orig, new_val->const_val))
                SET_USE (orig_p, new_val->const_val);
            }
        }

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          bool replaced_address;
          tree stmt = bsi_stmt (i);

          /* Skip statements that have been folded already.  */
          if (stmt_modified_p (stmt) || !is_exec_stmt (stmt))
            continue;

          /* Replace the statement with its folded version and mark it
             folded.  */
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Line %d: replaced ", get_lineno (stmt));
              print_generic_stmt (dump_file, stmt, TDF_SLIM);
            }

          if (replace_uses_in (stmt, &replaced_address)
              || replace_vuse_in (stmt, &replaced_address))
            {
              bool changed = fold_stmt (bsi_stmt_ptr (i));
              stmt = bsi_stmt (i);

              /* If we folded a builtin function, we'll likely
                 need to rename VDEFs.  */
              if (replaced_address || changed)
                mark_new_vars_to_rename (stmt, vars_to_rename);

              /* If we cleaned up EH information from the statement,
                 remove the associated EH edges.  */
              if (maybe_clean_eh_stmt (stmt))
                tree_purge_dead_eh_edges (bb);
            }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, " with ");
              print_generic_stmt (dump_file, stmt, TDF_SLIM);
              fprintf (dump_file, "\n");
            }
        }
    }

  /* And transfer what we learned from VALUE_VECTOR into the
     SSA_NAMEs themselves.  This probably isn't terribly important
     since we probably constant propagated the values to their
     use sites above.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      value *value;
      tree name = ssa_name (i);

      if (!name)
        continue;

      value = get_value (name);
      if (value->lattice_val == CONSTANT
          && is_gimple_reg (name)
          && is_gimple_min_invariant (value->const_val))
        SSA_NAME_VALUE (name) = value->const_val;
    }
}
/* Free allocated storage.  */

static void
ccp_finalize (void)
{
  /* Perform substitutions based on the known constant values.  */
  substitute_and_fold ();

  free (value_vector);
}
/* Compute the meet operator between VAL1 and VAL2:

                any M UNDEFINED   = any
                any M VARYING     = VARYING
                any M UNKNOWN_VAL = UNKNOWN_VAL
                Ci  M Cj          = Ci       if (i == j)
                Ci  M Cj          = VARYING  if (i != j)  */

static value
ccp_lattice_meet (value val1, value val2)
{
  value result;

  /* any M UNDEFINED = any.  */
  if (val1.lattice_val == UNDEFINED)
    return val2;
  else if (val2.lattice_val == UNDEFINED)
    return val1;

  /* any M VARYING = VARYING.  */
  if (val1.lattice_val == VARYING || val2.lattice_val == VARYING)
    {
      result.lattice_val = VARYING;
      result.const_val = NULL_TREE;
      return result;
    }

  /* any M UNKNOWN_VAL = UNKNOWN_VAL.  */
  if (val1.lattice_val == UNKNOWN_VAL
      || val2.lattice_val == UNKNOWN_VAL)
    {
      result.lattice_val = UNKNOWN_VAL;
      result.const_val = NULL_TREE;
      return result;
    }

  /* Ci M Cj = Ci       if (i == j)
     Ci M Cj = VARYING  if (i != j)  */
  if (simple_cst_equal (val1.const_val, val2.const_val) == 1)
    {
      result.lattice_val = CONSTANT;
      result.const_val = val1.const_val;
    }
  else
    {
      result.lattice_val = VARYING;
      result.const_val = NULL_TREE;
    }

  return result;
}
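/* Illustrative example (not part of the original sources): when merging
   the arguments of x_3 = PHI <x_1, x_2>, if x_1 is CONSTANT 4 and x_2
   is CONSTANT 4 the meet is CONSTANT 4; if x_2 is instead CONSTANT 5
   the meet is VARYING; if x_2 is still UNDEFINED the meet is simply
   CONSTANT 4, which is what lets constants flow around loop back edges
   on the first iteration.  */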
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined calling ccp_lattice_meet() with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (tree phi)
{
  value new_val, *old_val;
  int i;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_generic_expr (dump_file, phi, dump_flags);
    }

  old_val = get_value (PHI_RESULT (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_NOT_INTERESTING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNKNOWN_VAL:
      /* To avoid the default value of UNKNOWN_VAL overriding
         that of its possible constant arguments, temporarily
         set the PHI node's default lattice value to be
         UNDEFINED.  If the PHI node's old value was UNKNOWN_VAL and
         the new value is UNDEFINED, then we prevent the invalid
         transition by not calling set_lattice_value.  */
      new_val.lattice_val = UNDEFINED;
      new_val.const_val = NULL_TREE;
      break;

    case UNDEFINED:
    case UNINITIALIZED:
      new_val.lattice_val = UNDEFINED;
      new_val.const_val = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments.  */
      edge e = PHI_ARG_EDGE (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file,
                   "\n    Argument #%d (%d -> %d %sexecutable)\n",
                   i, e->src->index, e->dest->index,
                   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
        }

      /* If the incoming edge is executable, compute the meet operator for
         the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
        {
          tree rdef = PHI_ARG_DEF (phi, i);
          value *rdef_val, val;

          if (is_gimple_min_invariant (rdef))
            {
              val.lattice_val = CONSTANT;
              val.const_val = rdef;
              rdef_val = &val;
            }
          else
            rdef_val = get_value (rdef);

          new_val = ccp_lattice_meet (new_val, *rdef_val);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "\t");
              print_generic_expr (dump_file, rdef, dump_flags);
              dump_lattice_value (dump_file, "\tValue: ", *rdef_val);
              fprintf (dump_file, "\n");
            }

          if (new_val.lattice_val == VARYING)
            break;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Check for an invalid change from UNKNOWN_VAL to UNDEFINED.  */
  if (old_val->lattice_val == UNKNOWN_VAL
      && new_val.lattice_val == UNDEFINED)
    return SSA_PROP_NOT_INTERESTING;

  /* Otherwise, make the transition to the new value.  */
  if (set_lattice_value (PHI_RESULT (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
        return SSA_PROP_VARYING;
      else
        return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS.  */

static tree
ccp_fold (tree stmt)
{
  tree rhs = get_rhs (stmt);
  enum tree_code code = TREE_CODE (rhs);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree retval = NULL_TREE;
  vuse_optype vuses;

  vuses = STMT_VUSE_OPS (stmt);

  /* If the RHS is just a variable, then that variable must now have
     a constant value that we can return directly.  */
  if (TREE_CODE (rhs) == SSA_NAME)
    return get_value (rhs)->const_val;
  else if (DECL_P (rhs)
           && NUM_VUSES (vuses) == 1
           && rhs == SSA_NAME_VAR (VUSE_OP (vuses, 0)))
    return get_value (VUSE_OP (vuses, 0))->const_val;

  /* Unary operators.  Note that we know the single operand must
     be a constant.  So this should almost always return a
     simplified RHS.  */
  if (kind == tcc_unary)
    {
      /* Handle unary operators which can appear in GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);

      /* Simplify the operand down to a constant.  */
      if (TREE_CODE (op0) == SSA_NAME)
        {
          value *val = get_value (op0);
          if (val->lattice_val == CONSTANT)
            op0 = get_value (op0)->const_val;
        }

      retval = fold_unary_to_constant (code, TREE_TYPE (rhs), op0);

      /* If we folded, but did not create an invariant, then we can not
         use this expression.  */
      if (retval && ! is_gimple_min_invariant (retval))
        return NULL;

      /* If we could not fold the expression, but the arguments are all
         constants and gimple values, then build and return the new
         expression.

         In some cases the new expression is still something we can
         use as a replacement for an argument.  This happens with
         NOP conversions of types for example.

         In other cases the new expression can not be used as a
         replacement for an argument (as it would create non-gimple
         code).  But the new expression can still be used to derive
         other constants.  */
      if (! retval && is_gimple_min_invariant (op0))
        return build1 (code, TREE_TYPE (rhs), op0);
    }

  /* Binary and comparison operators.  We know one or both of the
     operands are constants.  */
  else if (kind == tcc_binary
           || kind == tcc_comparison
           || code == TRUTH_AND_EXPR
           || code == TRUTH_OR_EXPR
           || code == TRUTH_XOR_EXPR)
    {
      /* Handle binary and comparison operators that can appear in
         GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);
      tree op1 = TREE_OPERAND (rhs, 1);

      /* Simplify the operands down to constants when appropriate.  */
      if (TREE_CODE (op0) == SSA_NAME)
        {
          value *val = get_value (op0);
          if (val->lattice_val == CONSTANT)
            op0 = val->const_val;
        }

      if (TREE_CODE (op1) == SSA_NAME)
        {
          value *val = get_value (op1);
          if (val->lattice_val == CONSTANT)
            op1 = val->const_val;
        }

      retval = fold_binary_to_constant (code, TREE_TYPE (rhs), op0, op1);

      /* If we folded, but did not create an invariant, then we can not
         use this expression.  */
      if (retval && ! is_gimple_min_invariant (retval))
        return NULL;

      /* If we could not fold the expression, but the arguments are all
         constants and gimple values, then build and return the new
         expression.

         In some cases the new expression is still something we can
         use as a replacement for an argument.  This happens with
         NOP conversions of types for example.

         In other cases the new expression can not be used as a
         replacement for an argument (as it would create non-gimple
         code).  But the new expression can still be used to derive
         other constants.  */
      if (! retval
          && is_gimple_min_invariant (op0)
          && is_gimple_min_invariant (op1))
        return build (code, TREE_TYPE (rhs), op0, op1);
    }

  /* We may be able to fold away calls to builtin functions if their
     arguments are constants.  */
  else if (code == CALL_EXPR
           && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR
           && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0))
               == FUNCTION_DECL)
           && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)))
    {
      use_optype uses = STMT_USE_OPS (stmt);
      if (NUM_USES (uses) != 0)
        {
          tree *orig;
          size_t i;

          /* Preserve the original values of every operand.  */
          orig = xmalloc (sizeof (tree) * NUM_USES (uses));
          for (i = 0; i < NUM_USES (uses); i++)
            orig[i] = USE_OP (uses, i);

          /* Substitute operands with their values and try to fold.  */
          replace_uses_in (stmt, NULL);
          retval = fold_builtin (rhs, false);

          /* Restore operands to their original form.  */
          for (i = 0; i < NUM_USES (uses); i++)
            SET_USE_OP (uses, i, orig[i]);
          free (orig);
        }
    }

  /* If we got a simplified form, see if we need to convert its type.  */
  if (retval)
    return fold_convert (TREE_TYPE (rhs), retval);

  /* No simplification was possible.  */
  return rhs;
}
/* Evaluate statement STMT.  */

static value
evaluate_stmt (tree stmt)
{
  value val;
  tree simplified;
  latticevalue likelyvalue = likely_value (stmt);

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    simplified = get_rhs (stmt);
  /* Otherwise the statement is likely to have an UNDEFINED value and
     there will be nothing to do.  */
  else
    simplified = NULL_TREE;

  if (simplified && is_gimple_min_invariant (simplified))
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.const_val = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
         had undefined or virtual operands, then the result of the
         statement should be undefined or virtual respectively.
         Else the result of the statement is VARYING.  */
      val.lattice_val = (likelyvalue == UNDEFINED ? UNDEFINED : VARYING);
      val.lattice_val = (likelyvalue == UNKNOWN_VAL
                         ? UNKNOWN_VAL : val.lattice_val);
      val.const_val = NULL_TREE;
    }

  return val;
}
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  */

static enum ssa_prop_result
visit_assignment (tree stmt, tree *output_p)
{
  value val;
  tree lhs, rhs;
  vuse_optype vuses;
  v_must_def_optype v_must_defs;

  lhs = TREE_OPERAND (stmt, 0);
  rhs = TREE_OPERAND (stmt, 1);
  vuses = STMT_VUSE_OPS (stmt);
  v_must_defs = STMT_V_MUST_DEF_OPS (stmt);

  gcc_assert (NUM_V_MAY_DEFS (STMT_V_MAY_DEF_OPS (stmt)) == 0);
  gcc_assert (NUM_V_MUST_DEFS (v_must_defs) == 1
              || TREE_CODE (lhs) == SSA_NAME);

  /* We require the SSA version number of the lhs for the value_vector.
     Make sure we have it.  */
  if (TREE_CODE (lhs) != SSA_NAME)
    {
      /* If we make it here, then stmt only has one definition:
         a V_MUST_DEF.  */
      lhs = V_MUST_DEF_RESULT (v_must_defs, 0);
    }

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      /* For a simple copy operation, we copy the lattice values.  */
      value *nval = get_value (rhs);
      val = *nval;
    }
  else if (DECL_P (rhs)
           && NUM_VUSES (vuses) == 1
           && rhs == SSA_NAME_VAR (VUSE_OP (vuses, 0)))
    {
      /* Same as above, but the rhs is not a gimple register and yet
         has a known VUSE.  */
      value *nval = get_value (VUSE_OP (vuses, 0));
      val = *nval;
    }
  else
    /* Evaluate the statement.  */
    val = evaluate_stmt (stmt);

  /* If the original LHS was a VIEW_CONVERT_EXPR, modify the constant
     value to be a VIEW_CONVERT_EXPR of the old constant value.  This is
     valid because a VIEW_CONVERT_EXPR is valid everywhere an operand of
     aggregate type is valid.

     ??? Also, if this was a definition of a bitfield, we need to widen
     the constant value into the type of the destination variable.  This
     should not be necessary if GCC represented bitfields properly.  */
  {
    tree orig_lhs = TREE_OPERAND (stmt, 0);

    if (TREE_CODE (orig_lhs) == VIEW_CONVERT_EXPR
        && val.lattice_val == CONSTANT)
      {
        val.const_val = build1 (VIEW_CONVERT_EXPR,
                                TREE_TYPE (TREE_OPERAND (orig_lhs, 0)),
                                val.const_val);
        orig_lhs = TREE_OPERAND (orig_lhs, 1);
      }

    if (val.lattice_val == CONSTANT
        && TREE_CODE (orig_lhs) == COMPONENT_REF
        && DECL_BIT_FIELD (TREE_OPERAND (orig_lhs, 1)))
      {
        tree w = widen_bitfield (val.const_val, TREE_OPERAND (orig_lhs, 1),
                                 orig_lhs);

        if (w && is_gimple_min_invariant (w))
          val.const_val = w;
        else
          {
            val.lattice_val = VARYING;
            val.const_val = NULL;
          }
      }
  }

  /* If LHS is not a gimple register, then it cannot take on an
     UNDEFINED value.  */
  if (!is_gimple_reg (SSA_NAME_VAR (lhs))
      && val.lattice_val == UNDEFINED)
    val.lattice_val = UNKNOWN_VAL;

  /* Set the lattice value of the statement's output.  */
  if (set_lattice_value (lhs, val))
    {
      *output_p = lhs;
      if (val.lattice_val == VARYING)
        return SSA_PROP_VARYING;
      else
        return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (tree stmt, edge *taken_edge_p)
{
  value val;
  basic_block block;

  block = bb_for_stmt (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.const_val ? find_taken_edge (block, val.const_val) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (tree stmt, edge *taken_edge_p, tree *output_p)
{
  stmt_ann_t ann;
  v_may_def_optype v_may_defs;
  v_must_def_optype v_must_defs;
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement: ");
      print_generic_stmt (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  ann = stmt_ann (stmt);

  v_must_defs = V_MUST_DEF_OPS (ann);
  v_may_defs = V_MAY_DEF_OPS (ann);
  if (TREE_CODE (stmt) == MODIFY_EXPR
      && NUM_V_MAY_DEFS (v_may_defs) == 0
      && (NUM_V_MUST_DEFS (v_must_defs) == 1
          || TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME))
    {
      /* If the statement is an assignment that produces a single
         output value, evaluate its RHS to see if the lattice value of
         its output has changed.  */
      return visit_assignment (stmt, output_p);
    }
  else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR)
    {
      /* If STMT is a conditional branch, see if we can determine
         which branch will be taken.  */
      return visit_cond_stmt (stmt, taken_edge_p);
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
    def_to_varying (def);

  /* Mark all V_MAY_DEF operands VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_VMAYDEF)
    def_to_varying (def);

  return SSA_PROP_VARYING;
}
/* Main entry point for SSA Conditional Constant Propagation.

   [ DESCRIBE MAIN ALGORITHM HERE ]  */

static void
execute_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  ccp_finalize ();
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}


struct tree_opt_pass pass_ccp =
{
  "ccp",                                /* name */
  gate_ccp,                             /* gate */
  execute_ssa_ccp,                      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CCP,                          /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_cleanup_cfg | TODO_dump_func | TODO_rename_vars
    | TODO_ggc_collect | TODO_verify_ssa
    | TODO_verify_stmts,                /* todo_flags_finish */
  0                                     /* letter */
};
/* Given a constant value VAL for bitfield FIELD, and a destination
   variable VAR, return VAL appropriately widened to fit into VAR.  If
   FIELD is wider than HOST_WIDE_INT, NULL is returned.  */

tree
widen_bitfield (tree val, tree field, tree var)
{
  unsigned HOST_WIDE_INT var_size, field_size;
  tree wide_val;
  unsigned HOST_WIDE_INT mask;
  unsigned int i;

  /* We can only do this if the size of the type and field and VAL are
     all constants representable in HOST_WIDE_INT.  */
  if (!host_integerp (TYPE_SIZE (TREE_TYPE (var)), 1)
      || !host_integerp (DECL_SIZE (field), 1)
      || !host_integerp (val, 0))
    return NULL_TREE;

  var_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1);
  field_size = tree_low_cst (DECL_SIZE (field), 1);

  /* Give up if either the bitfield or the variable are too wide.  */
  if (field_size > HOST_BITS_PER_WIDE_INT
      || var_size > HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;

  gcc_assert (var_size >= field_size);

  /* If the sign bit of the value is not set or the field's type is unsigned,
     just mask off the high order bits of the value.  */
  if (DECL_UNSIGNED (field)
      || !(tree_low_cst (val, 0) & (((HOST_WIDE_INT)1) << (field_size - 1))))
    {
      /* Zero extension.  Build a mask with the lower 'field_size' bits
         set and a BIT_AND_EXPR node to clear the high order bits of
         the value.  */
      for (i = 0, mask = 0; i < field_size; i++)
        mask |= ((HOST_WIDE_INT) 1) << i;

      wide_val = build (BIT_AND_EXPR, TREE_TYPE (var), val,
                        fold_convert (TREE_TYPE (var),
                                      build_int_cst (NULL_TREE, mask)));
    }
  else
    {
      /* Sign extension.  Create a mask with the upper 'field_size'
         bits set and a BIT_IOR_EXPR to set the high order bits of the
         value.  */
      for (i = 0, mask = 0; i < (var_size - field_size); i++)
        mask |= ((HOST_WIDE_INT) 1) << (var_size - i - 1);

      wide_val = build (BIT_IOR_EXPR, TREE_TYPE (var), val,
                        fold_convert (TREE_TYPE (var),
                                      build_int_cst (NULL_TREE, mask)));
    }

  return fold (wide_val);
}
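/* Worked example (not part of the original sources): for a signed 3-bit
   bitfield in a 32-bit variable holding the bit pattern 101 (VAL = 5),
   the sign bit is set, so the sign-extension branch builds the mask
   0xfffffff8 and returns fold (5 | 0xfffffff8) = -3, which is the value
   the 3-bit field actually represents.  For an unsigned field the
   zero-extension branch would instead mask with 0x7.  */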
/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type)
{
  tree min_idx, idx, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of elements type) from that ARRAY_REF).
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
          || TREE_CODE (elt_offset) != INTEGER_CST)
        return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!lang_hooks.types_compatible_p (orig_type, elt_type))
    return NULL_TREE;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
        elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = integer_zero_node;
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;

      if (TREE_CODE (elt_size) != INTEGER_CST
          || div_and_round_double (TRUNC_DIV_EXPR, 1,
                                   TREE_INT_CST_LOW (offset),
                                   TREE_INT_CST_HIGH (offset),
                                   TREE_INT_CST_LOW (elt_size),
                                   TREE_INT_CST_HIGH (elt_size),
                                   &lquo, &hquo, &lrem, &hrem)
          || lrem || hrem)
        return NULL_TREE;

      idx = build_int_cst_wide (NULL_TREE, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = integer_zero_node;
  if (TYPE_DOMAIN (array_type))
    {
      if (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
        min_idx = TYPE_MIN_VALUE (TYPE_DOMAIN (array_type));
      else
        min_idx = fold_convert (TYPE_DOMAIN (array_type), min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
        return NULL_TREE;

      idx = fold_convert (TYPE_DOMAIN (array_type), idx);
      elt_offset = fold_convert (TYPE_DOMAIN (array_type), elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  return build (ARRAY_REF, orig_type, base, idx, min_idx,
                size_int (tree_low_cst (elt_size, 1)
                          / (TYPE_ALIGN_UNIT (elt_type))));
}
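/* Worked example (not part of the original sources): with int a[10] and
   4-byte ints, the dereference *(&a[2] + 8) reaches this function with
   BASE = a[2] and OFFSET = 8.  The ARRAY_REF contributes ELT_OFFSET = 2,
   the byte offset divides exactly by the element size (8 / 4 = 2), and
   the two indices are added, so the result is the ARRAY_REF a[4].  */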
/* A subroutine of fold_stmt_r.  Attempts to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
/* ??? This doesn't handle class inheritance.  */

static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
                                    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field, field_offset;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (lang_hooks.types_compatible_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
        continue;
      if (DECL_BIT_FIELD (f))
        continue;

      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
        continue;

      /* ??? Java creates "interesting" fields for representing base classes.
         They have no name, and have no context.  With no context, we get into
         trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
        continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
        continue;

      field_type = TREE_TYPE (f);
      if (cmp < 0)
        {
          /* Don't care about offsets into the middle of scalars.  */
          if (!AGGREGATE_TYPE_P (field_type))
            continue;

          /* Check for array at the end of the struct.  This is often
             used as for flexible array members.  We should be able to
             turn this into an array access anyway.  */
          if (TREE_CODE (field_type) == ARRAY_TYPE)
            tail_array_field = f;

          /* Check the end of the field against the offset.  */
          if (!DECL_SIZE_UNIT (f)
              || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
            continue;
          t = int_const_binop (MINUS_EXPR, offset, DECL_FIELD_OFFSET (f), 1);
          if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
            continue;

          /* If we matched, then set offset to the displacement into
             this field.  */
          offset = t;
        }

      /* Here we exactly match the offset being checked.  If the types match,
         then we can return that field.  */
      else if (lang_hooks.types_compatible_p (orig_type, field_type))
        {
          if (base_is_ptr)
            base = build1 (INDIRECT_REF, record_type, base);
          t = build (COMPONENT_REF, field_type, base, f, NULL_TREE);
          return t;
        }

      /* Don't care about type-punning of scalars.  */
      else if (!AGGREGATE_TYPE_P (field_type))
        return NULL_TREE;

      goto found;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);

 found:
  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type);
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
                                             orig_type, false);
}
/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a PLUS_EXPR.  */
  if (TREE_CODE (base) == PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = int_const_binop (PLUS_EXPR, offset, offset2, 1);
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
          && is_gimple_min_invariant (DECL_INITIAL (base)))
        return DECL_INITIAL (base);

      /* Try folding *(&B+O) to B[X].  */
      t = maybe_fold_offset_to_array_ref (base, offset, TREE_TYPE (expr));
      if (t)
        return t;

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_component_ref (TREE_TYPE (base), base, offset,
                                              TREE_TYPE (expr), false);
      if (t)
        return t;

      /* Fold *&B to B.  We can only do this if EXPR is the same type
         as BASE.  We can't do this if EXPR is the element type of an array
         and BASE is the array.  */
      if (integer_zerop (offset)
          && lang_hooks.types_compatible_p (TREE_TYPE (base),
                                            TREE_TYPE (expr)))
        return base;
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
         such as "_"[3].  Bail out of the entire substitution search
         and arrange for the entire statement to be replaced by a
         call to __builtin_trap.  In all likelihood this will all be
         constant-folded away, but in the meantime we can't leave with
         something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
        {
          /* FIXME: Except that this causes problems elsewhere with dead
             code not being deleted, and we abort in the rtl expanders
             because we failed to remove some ssa_name.  In the meantime,
             just return zero.  */
          /* FIXME2: This condition should be signaled by
             fold_read_from_constant_string directly, rather than
             re-checking for it here.  */
          return integer_zero_node;
        }

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
        {
          t = maybe_fold_offset_to_component_ref (TREE_TYPE (TREE_TYPE (base)),
                                                  base, offset,
                                                  TREE_TYPE (expr), true);
          if (t)
            return t;
        }
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}
/* A subroutine of fold_stmt_r.  EXPR is a PLUS_EXPR.

   A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
        (T *)(&array + const)
   where the cast doesn't actually exist, but is implicit in the
   type of the PLUS_EXPR.  We'd like to turn this into
        &array[x]
   which may be able to propagate further.  */

static tree
maybe_fold_stmt_addition (tree expr)
{
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  tree ptr_type = TREE_TYPE (expr);
  tree ptd_type;
  tree t;
  bool subtract = (TREE_CODE (expr) == MINUS_EXPR);

  /* We're only interested in pointer arithmetic.  */
  if (!POINTER_TYPE_P (ptr_type))
    return NULL_TREE;
  /* Canonicalize the integral operand to op1.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (op0)))
    {
      if (subtract)
        return NULL_TREE;
      t = op0, op0 = op1, op1 = t;
    }
  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    return NULL_TREE;
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
        break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
        break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
        {
          min_idx = TYPE_MIN_VALUE (min_idx);
          if (min_idx)
            {
              if (TREE_CODE (min_idx) != INTEGER_CST)
                break;

              array_idx = convert (TREE_TYPE (min_idx), array_idx);
              if (!integer_zerop (min_idx))
                array_idx = int_const_binop (MINUS_EXPR, array_idx,
                                             min_idx, 0);
            }
        }

      /* Convert the index to a byte offset.  */
      array_idx = convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      /* If we're manipulating unsigned types, then folding into negative
         values can produce incorrect results.  Particularly if the type
         is smaller than the width of the pointer.  */
      if (subtract
          && TYPE_UNSIGNED (TREE_TYPE (op1))
          && tree_int_cst_lt (array_idx, op1))
        break;
      op1 = int_const_binop (subtract ? MINUS_EXPR : PLUS_EXPR,
                             array_idx, op1, 0);
      subtract = false;
      op0 = array_obj;
    }

  /* If we weren't able to fold the subtraction into another array reference,
     canonicalize the integer for passing to the array and component ref
     simplification functions.  */
  if (subtract)
    {
      if (TYPE_UNSIGNED (TREE_TYPE (op1)))
        return NULL;
      op1 = fold (build1 (NEGATE_EXPR, TREE_TYPE (op1), op1));
      /* ??? In theory fold should always produce another integer.  */
      if (TREE_CODE (op1) != INTEGER_CST)
        return NULL;
    }

  ptd_type = TREE_TYPE (ptr_type);

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
                                            ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, ptr_type, t);

  return t;
}
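/* Worked example (not part of the original sources): for int a[10], the
   address expression (int *) (&a + 8) reaches this function as a
   PLUS_EXPR with op0 = &a and op1 = 8.  After stripping the ADDR_EXPR,
   the byte offset is handed to maybe_fold_offset_to_array_ref, which
   divides it by the 4-byte element size and yields a[2]; the result is
   rebuilt as &a[2], which can then propagate further.  */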
/* Subroutine of fold_stmt called via walk_tree.  We perform several
   simplifications of EXPR_P, mostly having to do with pointer arithmetic.  */

static tree
fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
{
  bool *changed_p = data;
  tree expr = *expr_p, t;

  /* ??? It'd be nice if walk_tree had a pre-order option.  */
  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
                                    integer_zero_node);
      break;

      /* ??? Could handle ARRAY_REF here, as a variant of INDIRECT_REF.
         We'd only want to bother decomposing an existing ARRAY_REF if
         the base array is found to have another offset contained within.
         Otherwise we'd be wasting time.  */

    case ADDR_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Set TREE_INVARIANT properly so that the value is properly
         considered constant, and so gets propagated as expected.  */
      if (*changed_p)
        recompute_tree_invarant_for_addr_expr (expr);
      return NULL_TREE;

    case PLUS_EXPR:
    case MINUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (expr);
      break;

    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
         We've already checked that the records are compatible, so we should
         come up with a set of compatible fields.  */
      {
        tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
        tree expr_field = TREE_OPERAND (expr, 1);

        if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
          {
            expr_field = find_compatible_field (expr_record, expr_field);
            TREE_OPERAND (expr, 1) = expr_field;
          }
      }
      break;

    default:
      return NULL_TREE;
    }

  if (t)
    {
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}
/* Return the string length of ARG in LENGTH.  If ARG is an SSA name variable,
   follow its use-def chains.  If LENGTH is not NULL and its value is not
   equal to the length we determine, or if we are unable to determine the
   length, return false.  VISITED is a bitmap of visited variables.  */

static bool
get_strlen (tree arg, tree *length, bitmap visited)
{
  tree var, def_stmt, val;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length && simple_cst_equal (val, *length) != 1)
        return false;

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (TREE_CODE (def_stmt))
    {
    case MODIFY_EXPR:
      {
        tree len, rhs;

        /* The RHS of the statement defining VAR must either have a
           constant length or come from another SSA_NAME with a constant
           length.  */
        rhs = TREE_OPERAND (def_stmt, 1);
        STRIP_NOPS (rhs);
        if (TREE_CODE (rhs) == SSA_NAME)
          return get_strlen (rhs, length, visited);

        /* See if the RHS is a constant length.  */
        len = c_strlen (rhs, 1);
        if (len)
          {
            if (*length && simple_cst_equal (len, *length) != 1)
              return false;

            *length = len;
            return true;
          }

        break;
      }

    case PHI_NODE:
      {
        /* All the arguments of the PHI node must have the same constant
           length.  */
        int i;

        for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
          {
            tree arg = PHI_ARG_DEF (def_stmt, i);

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == PHI_RESULT (def_stmt))
              continue;

            if (!get_strlen (arg, length, visited))
              return false;
          }

        return true;
      }

    default:
      break;
    }

  return false;
}
/* Fold builtin call FN in statement STMT.  If it cannot be folded into a
   constant, return NULL_TREE.  Otherwise, return its constant value.  */

static tree
ccp_fold_builtin (tree stmt, tree fn)
{
  tree result, strlen_val[2];
  tree callee, arglist, a;
  int strlen_arg, i;
  bitmap visited;
  bool ignore;

  ignore = TREE_CODE (stmt) != MODIFY_EXPR;

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_builtin (fn, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = get_callee_fndecl (fn);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  arglist = TREE_OPERAND (fn, 1);
  if (!arglist)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      strlen_arg = 1;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      strlen_arg = 2;
      break;
    default:
      return NULL_TREE;
    }

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_XMALLOC ();

  memset (strlen_val, 0, sizeof (strlen_val));
  for (i = 0, a = arglist;
       strlen_arg;
       i++, strlen_arg >>= 1, a = TREE_CHAIN (a))
    if (strlen_arg & 1)
      {
        bitmap_clear (visited);
        if (!get_strlen (TREE_VALUE (a), &strlen_val[i], visited))
          strlen_val[i] = NULL_TREE;
      }

  BITMAP_XFREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (strlen_val[0])
        {
          tree new = fold_convert (TREE_TYPE (fn), strlen_val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we can not use the result.  */
          if (is_gimple_val (new)
              || (is_gimple_cast (new)
                  && is_gimple_val (TREE_OPERAND (new, 0))))
            return new;
        }
      break;

    case BUILT_IN_STRCPY:
      if (strlen_val[1] && is_gimple_val (strlen_val[1]))
        result = fold_builtin_strcpy (fn, strlen_val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (strlen_val[1] && is_gimple_val (strlen_val[1]))
        result = fold_builtin_strncpy (fn, strlen_val[1]);
      break;

    case BUILT_IN_FPUTS:
      result = fold_builtin_fputs (arglist,
                                   TREE_CODE (stmt) != MODIFY_EXPR, 0,
                                   strlen_val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      result = fold_builtin_fputs (arglist,
                                   TREE_CODE (stmt) != MODIFY_EXPR, 1,
                                   strlen_val[0]);
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
/* Fold the statement pointed by STMT_P.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */

bool
fold_stmt (tree *stmt_p)
{
  tree rhs, result, stmt;
  bool changed = false;

  stmt = *stmt_p;

  /* If we replaced constants and the statement makes pointer dereferences,
     then we may need to fold instances of *&VAR into VAR, etc.  */
  if (walk_tree (stmt_p, fold_stmt_r, &changed, NULL))
    {
      *stmt_p
        = build_function_call_expr (implicit_built_in_decls[BUILT_IN_TRAP],
                                    NULL);
      return true;
    }

  rhs = get_rhs (stmt);
  if (!rhs)
    return changed;
  result = NULL_TREE;

  if (TREE_CODE (rhs) == CALL_EXPR)
    {
      tree callee;

      /* Check for builtins that CCP can handle using information not
         available in the generic fold routines.  */
      callee = get_callee_fndecl (rhs);
      if (callee && DECL_BUILT_IN (callee))
        result = ccp_fold_builtin (stmt, rhs);
      else
        {
          /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
             here are when we've propagated the address of a decl into the
             object slot.  */
          /* ??? Should perhaps do this in fold proper.  However, doing it
             there requires that we create a new CALL_EXPR, and that requires
             copying EH region info to the new node.  Easier to just do it
             here where we can just smash the call operand.  */
          callee = TREE_OPERAND (rhs, 0);
          if (TREE_CODE (callee) == OBJ_TYPE_REF
              && lang_hooks.fold_obj_type_ref
              && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
              && DECL_P (TREE_OPERAND
                         (OBJ_TYPE_REF_OBJECT (callee), 0)))
            {
              tree t;

              /* ??? Caution: Broken ADDR_EXPR semantics means that
                 looking at the type of the operand of the addr_expr
                 can yield an array type.  See silly exception in
                 check_pointer_types_r.  */

              t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
              t = lang_hooks.fold_obj_type_ref (callee, t);
              if (t)
                {
                  TREE_OPERAND (rhs, 0) = t;
                  changed = true;
                }
            }
        }
    }

  /* If we couldn't fold the RHS, hand over to the generic fold routines.  */
  if (result == NULL_TREE)
    result = fold (rhs);

  /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR that
     may have been added by fold, and "useless" type conversions that might
     now be apparent due to propagation.  */
  STRIP_USELESS_TYPE_CONVERSION (result);

  if (result != rhs)
    changed |= set_rhs (stmt_p, result);

  return changed;
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  */

static tree
convert_to_gimple_builtin (block_stmt_iterator *si_p, tree expr)
{
  tree_stmt_iterator ti;
  tree stmt = bsi_stmt (*si_p);
  tree tmp, stmts = NULL;

  push_gimplify_context ();
  tmp = get_initialized_tmp_var (expr, &stmts, NULL);
  pop_gimplify_context (NULL);

  /* The replacement can expose previously unreferenced variables.  */
  for (ti = tsi_start (stmts); !tsi_end_p (ti); tsi_next (&ti))
    {
      find_new_referenced_vars (tsi_stmt_ptr (ti));
      mark_new_vars_to_rename (tsi_stmt (ti), vars_to_rename);
    }

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&stmts, EXPR_LOCATION (stmt));

  bsi_insert_before (si_p, stmts, BSI_SAME_STMT);

  return tmp;
}
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static void
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          tree *stmtp = bsi_stmt_ptr (i);
          tree call = get_rhs (*stmtp);
          tree callee, result;

          if (!call || TREE_CODE (call) != CALL_EXPR)
            continue;
          callee = get_callee_fndecl (call);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            continue;

          result = ccp_fold_builtin (*stmtp, call);
          if (!result)
            switch (DECL_FUNCTION_CODE (callee))
              {
              case BUILT_IN_CONSTANT_P:
                /* Resolve __builtin_constant_p.  If it hasn't been
                   folded to integer_one_node by now, it's fairly
                   certain that the value simply isn't constant.  */
                result = integer_zero_node;
                break;

              default:
                continue;
              }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_generic_stmt (dump_file, *stmtp, dump_flags);
            }

          if (!set_rhs (stmtp, result))
            {
              result = convert_to_gimple_builtin (&i, result);
              if (result && !set_rhs (stmtp, result))
                gcc_unreachable ();
            }
          modify_stmt (*stmtp);
          if (maybe_clean_eh_stmt (*stmtp)
              && tree_purge_dead_eh_edges (bb))
            cfg_changed = true;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_generic_stmt (dump_file, *stmtp, dump_flags);
              fprintf (dump_file, "\n");
            }
        }
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    cleanup_tree_cfg ();
}
struct tree_opt_pass pass_fold_builtins =
{
  "fab",                                /* name */
  NULL,                                 /* gate */
  execute_fold_all_builtins,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_rename_vars,                 /* todo_flags_finish */
  0                                     /* letter */
};