/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* Conditional constant propagation.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
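
/* As an illustration, given a GIMPLE fragment like

       a_1 = 2;
       b_2 = a_1 + 3;
       if (b_2 > 10)
         ...

   the pass discovers that b_2 is the constant 5, folds the predicate
   to false, and marks the edge into the THEN clause as not
   executable, so statements reachable only through that edge are
   never simulated.  */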

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "basic-block.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"

/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED = 0,
  UNDEFINED,
  UNKNOWN_VAL,
  CONSTANT,
  VARYING
} latticevalue;

/* Main structure for CCP.  Contains the lattice value and, if it's a
   constant, the constant value.  */
typedef struct
{
  latticevalue lattice_val;
  tree const_val;
} value;

/* This is used to track the current value of each variable.  */
static value *value_vector;
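
/* Informal sketch of the lattice (derived from ccp_lattice_meet and
   set_lattice_value below); a variable may only move downwards:

       UNDEFINED      no information known yet
           |
       CONSTANT       a single known constant value
           |
       UNKNOWN_VAL    stored value of a memory location, not known
           |          to be any one constant
       VARYING        cannot be a compile-time constant

   UNINITIALIZED merely marks VALUE_VECTOR entries that have not been
   computed yet; get_value lazily replaces it with a default.  */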

/* Dump lattice value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, value val)
{
  switch (val.lattice_val)
    {
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case UNKNOWN_VAL:
      fprintf (outf, "%sUNKNOWN_VAL", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.const_val, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}

/* Return a default value for variable VAR using the following rules:

   1- Function arguments are considered VARYING.

   2- Global and static variables that are declared constant are
      considered CONSTANT.

   3- Any other virtually defined variable is considered UNKNOWN_VAL.

   4- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.  */

static value
get_default_value (tree var)
{
  value val;
  tree sym;

  if (TREE_CODE (var) == SSA_NAME)
    sym = SSA_NAME_VAR (var);
  else
    {
      gcc_assert (DECL_P (var));
      sym = var;
    }

  val.lattice_val = UNDEFINED;
  val.const_val = NULL_TREE;

  if (TREE_CODE (var) == SSA_NAME
      && SSA_NAME_VALUE (var)
      && is_gimple_min_invariant (SSA_NAME_VALUE (var)))
    {
      val.lattice_val = CONSTANT;
      val.const_val = SSA_NAME_VALUE (var);
    }
  else if (TREE_CODE (sym) == PARM_DECL || TREE_THIS_VOLATILE (sym))
    {
      /* Function arguments and volatile variables are considered VARYING.  */
      val.lattice_val = VARYING;
    }
  else if (TREE_STATIC (sym))
    {
      /* Globals and static variables are considered UNKNOWN_VAL,
         unless they are declared 'const'.  */
      if (TREE_READONLY (sym)
          && DECL_INITIAL (sym)
          && is_gimple_min_invariant (DECL_INITIAL (sym)))
        {
          val.lattice_val = CONSTANT;
          val.const_val = DECL_INITIAL (sym);
        }
      else
        {
          val.const_val = NULL_TREE;
          val.lattice_val = UNKNOWN_VAL;
        }
    }
  else if (!is_gimple_reg (sym))
    {
      val.const_val = NULL_TREE;
      val.lattice_val = UNKNOWN_VAL;
    }
  else
    {
      enum tree_code code;
      tree stmt = SSA_NAME_DEF_STMT (var);

      if (!IS_EMPTY_STMT (stmt))
        {
          code = TREE_CODE (stmt);
          if (code != MODIFY_EXPR && code != PHI_NODE)
            val.lattice_val = VARYING;
        }
    }

  return val;
}
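
/* For instance (purely illustrative): a PARM_DECL defaults to VARYING
   since the caller may pass anything; a file-scope
   'static const int k = 10;' defaults to CONSTANT 10 because its
   DECL_INITIAL is an invariant; and a GIMPLE register with no real
   defining statement defaults to UNDEFINED, which lets PHI nodes
   ignore it during the meet operation below.  */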

/* Get the constant value associated with variable VAR.  */

static value *
get_value (tree var)
{
  value *val;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  val = &value_vector[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}

/* Set the lattice value for variable VAR to VAL.  Return true if VAL
   is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, value val)
{
  value *old = get_value (var);

  if (val.lattice_val == UNDEFINED)
    {
      /* CONSTANT->UNDEFINED is never a valid state transition.  */
      gcc_assert (old->lattice_val != CONSTANT);

      /* UNKNOWN_VAL->UNDEFINED is never a valid state transition.  */
      gcc_assert (old->lattice_val != UNKNOWN_VAL);

      /* VARYING->UNDEFINED is generally not a valid state transition,
         except for values which are initialized to VARYING.  */
      gcc_assert (old->lattice_val != VARYING
                  || get_default_value (var).lattice_val == VARYING);
    }
  else if (val.lattice_val == CONSTANT)
    /* VARYING -> CONSTANT is an invalid state transition, except
       for objects which start off in a VARYING state.  */
    gcc_assert (old->lattice_val != VARYING
                || get_default_value (var).lattice_val == VARYING);

  /* If the constant for VAR has changed, then this VAR is really varying.  */
  if (old->lattice_val == CONSTANT
      && val.lattice_val == CONSTANT
      && !simple_cst_equal (old->const_val, val.const_val))
    {
      val.lattice_val = VARYING;
      val.const_val = NULL_TREE;
    }

  if (old->lattice_val != val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          dump_lattice_value (dump_file, "Lattice value changed to ", val);
          fprintf (dump_file, ".  Adding definition to SSA edges.\n");
        }

      *old = val;
      return true;
    }

  return false;
}

/* Set the lattice value for the variable VAR to VARYING.  */

static void
def_to_varying (tree var)
{
  value val;
  val.lattice_val = VARYING;
  val.const_val = NULL_TREE;
  set_lattice_value (var, val);
}

/* Return the likely latticevalue for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if any operands of STMT are undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static latticevalue
likely_value (tree stmt)
{
  vuse_optype vuses;
  int found_constant = 0;
  stmt_ann_t ann;
  tree use;
  ssa_op_iter iter;

  /* If the statement makes aliased loads or has volatile operands, it
     won't fold to a constant value.  */
  ann = stmt_ann (stmt);
  if (ann->makes_aliased_loads || ann->has_volatile_ops)
    return VARYING;

  /* A CALL_EXPR is assumed to be varying.  This may be overly conservative,
     in the presence of const and pure calls.  */
  if (get_call_expr_in (stmt) != NULL_TREE)
    return VARYING;

  get_stmt_operands (stmt);

  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      value *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
        return UNDEFINED;

      if (val->lattice_val == CONSTANT)
        found_constant = 1;
    }

  vuses = VUSE_OPS (ann);

  if (NUM_VUSES (vuses))
    {
      tree vuse = VUSE_OP (vuses, 0);
      value *val = get_value (vuse);

      if (val->lattice_val == UNKNOWN_VAL)
        return UNKNOWN_VAL;

      /* There should be no VUSE operands that are UNDEFINED.  */
      gcc_assert (val->lattice_val != UNDEFINED);

      if (val->lattice_val == CONSTANT)
        found_constant = 1;
    }

  return ((found_constant || (!USE_OPS (ann) && !vuses))
          ? CONSTANT : VARYING);
}
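
/* Illustration: for 'x_3 = y_1 + z_2' where y_1 is CONSTANT 4 and
   z_2 is UNDEFINED, the answer is UNDEFINED (the statement cannot be
   evaluated yet); if z_2 later becomes CONSTANT 6, the answer becomes
   CONSTANT and evaluate_stmt will try to fold 4 + 6.  */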

/* Function indicating whether we ought to include information for VAR
   when calculating immediate uses.  */

static bool
need_imm_uses_for (tree var)
{
  return get_value (var)->lattice_val != VARYING;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;
  sbitmap is_may_def;

  value_vector = (value *) xmalloc (num_ssa_names * sizeof (value));
  memset (value_vector, 0, num_ssa_names * sizeof (value));

  /* Set of SSA_NAMEs that are defined by a V_MAY_DEF.  */
  is_may_def = sbitmap_alloc (num_ssa_names);
  sbitmap_zero (is_may_def);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          bool is_varying = false;
          tree stmt = bsi_stmt (i);
          ssa_op_iter iter;
          tree def;

          get_stmt_operands (stmt);

          /* Get the default value for each DEF and V_MUST_DEF.  */
          FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter,
                                     (SSA_OP_DEF | SSA_OP_VMUSTDEF))
            {
              if (get_value (def)->lattice_val == VARYING)
                is_varying = true;
            }

          /* Mark all V_MAY_DEF operands VARYING.  */
          FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_VMAYDEF)
            {
              get_value (def)->lattice_val = VARYING;
              SET_BIT (is_may_def, SSA_NAME_VERSION (def));
            }

          /* Statements other than MODIFY_EXPR, COND_EXPR and
             SWITCH_EXPR are not interesting for constant propagation.
             Mark them VARYING.  */
          if (TREE_CODE (stmt) != MODIFY_EXPR
              && TREE_CODE (stmt) != COND_EXPR
              && TREE_CODE (stmt) != SWITCH_EXPR)
            is_varying = true;

          DONT_SIMULATE_AGAIN (stmt) = is_varying;
        }
    }

  /* Now process PHI nodes.  */
  FOR_EACH_BB (bb)
    {
      tree phi, var;
      int x;

      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
        {
          value *val = get_value (PHI_RESULT (phi));

          for (x = 0; x < PHI_NUM_ARGS (phi); x++)
            {
              var = PHI_ARG_DEF (phi, x);

              /* If one argument has a V_MAY_DEF, the result is
                 VARYING.  */
              if (TREE_CODE (var) == SSA_NAME)
                {
                  if (TEST_BIT (is_may_def, SSA_NAME_VERSION (var)))
                    {
                      val->lattice_val = VARYING;
                      SET_BIT (is_may_def,
                               SSA_NAME_VERSION (PHI_RESULT (phi)));
                      break;
                    }
                }
            }

          DONT_SIMULATE_AGAIN (phi) = (val->lattice_val == VARYING);
        }
    }

  sbitmap_free (is_may_def);

  /* Compute immediate uses for variables we care about.  */
  compute_immediate_uses (TDFA_USE_OPS | TDFA_USE_VOPS, need_imm_uses_for);
}

/* Replace USE references in statement STMT with their immediate reaching
   definition.  Return true if at least one reference was replaced.  If
   REPLACED_ADDRESSES_P is given, it will be set to true if an address
   constant was replaced.  */

static bool
replace_uses_in (tree stmt, bool *replaced_addresses_p)
{
  bool replaced = false;
  use_operand_p use;
  ssa_op_iter iter;

  if (replaced_addresses_p)
    *replaced_addresses_p = false;

  get_stmt_operands (stmt);

  FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      tree tuse = USE_FROM_PTR (use);
      value *val = get_value (tuse);

      if (val->lattice_val != CONSTANT)
        continue;

      if (TREE_CODE (stmt) == ASM_EXPR
          && !may_propagate_copy_into_asm (tuse))
        continue;

      SET_USE (use, val->const_val);

      replaced = true;
      if (POINTER_TYPE_P (TREE_TYPE (tuse)) && replaced_addresses_p)
        *replaced_addresses_p = true;
    }

  return replaced;
}

/* Replace the VUSE references in statement STMT with its immediate reaching
   definition.  Return true if the reference was replaced.  If
   REPLACED_ADDRESSES_P is given, it will be set to true if an address
   constant was replaced.  */

static bool
replace_vuse_in (tree stmt, bool *replaced_addresses_p)
{
  bool replaced = false;
  vuse_optype vuses;
  use_operand_p vuse;
  value *val;

  if (replaced_addresses_p)
    *replaced_addresses_p = false;

  get_stmt_operands (stmt);

  vuses = STMT_VUSE_OPS (stmt);

  if (NUM_VUSES (vuses) != 1)
    return false;

  vuse = VUSE_OP_PTR (vuses, 0);
  val = get_value (USE_FROM_PTR (vuse));

  if (val->lattice_val == CONSTANT
      && TREE_CODE (stmt) == MODIFY_EXPR
      && DECL_P (TREE_OPERAND (stmt, 1))
      && TREE_OPERAND (stmt, 1) == SSA_NAME_VAR (USE_FROM_PTR (vuse)))
    {
      TREE_OPERAND (stmt, 1) = val->const_val;
      replaced = true;
      if (POINTER_TYPE_P (TREE_TYPE (USE_FROM_PTR (vuse)))
          && replaced_addresses_p)
        *replaced_addresses_p = true;
    }

  return replaced;
}

/* Perform final substitution and folding.  After this pass the program
   should still be in SSA form.  */

static void
substitute_and_fold (void)
{
  basic_block bb;
  size_t i;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file,
             "\nSubstituting constants and folding statements\n\n");

  /* Substitute constants in every statement of every basic block.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      tree phi;

      /* Propagate our known constants into PHI nodes.  */
      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
        {
          int i;

          for (i = 0; i < PHI_NUM_ARGS (phi); i++)
            {
              value *new_val;
              use_operand_p orig_p = PHI_ARG_DEF_PTR (phi, i);
              tree orig = USE_FROM_PTR (orig_p);

              if (! SSA_VAR_P (orig))
                break;

              new_val = get_value (orig);
              if (new_val->lattice_val == CONSTANT
                  && may_propagate_copy (orig, new_val->const_val))
                SET_USE (orig_p, new_val->const_val);
            }
        }

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          bool replaced_address;
          tree stmt = bsi_stmt (i);

          /* Skip statements that have been folded already.  */
          if (stmt_modified_p (stmt) || !is_exec_stmt (stmt))
            continue;

          /* Replace the statement with its folded version and mark it
             folded.  */
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Line %d: replaced ", get_lineno (stmt));
              print_generic_stmt (dump_file, stmt, TDF_SLIM);
            }

          if (replace_uses_in (stmt, &replaced_address)
              || replace_vuse_in (stmt, &replaced_address))
            {
              bool changed = fold_stmt (bsi_stmt_ptr (i));
              stmt = bsi_stmt (i);

              /* If we folded a builtin function, we'll likely
                 need to rename VDEFs.  */
              if (replaced_address || changed)
                {
                  mark_new_vars_to_rename (stmt, vars_to_rename);
                  if (maybe_clean_eh_stmt (stmt))
                    tree_purge_dead_eh_edges (bb);
                }

              modify_stmt (stmt);
            }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, " with ");
              print_generic_stmt (dump_file, stmt, TDF_SLIM);
              fprintf (dump_file, "\n");
            }
        }
    }

  /* And transfer what we learned from VALUE_VECTOR into the
     SSA_NAMEs themselves.  This probably isn't terribly important
     since we probably constant propagated the values to their
     use sites above.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      value *value;

      if (!name)
        continue;

      value = get_value (name);
      if (value->lattice_val == CONSTANT
          && is_gimple_reg (name)
          && is_gimple_min_invariant (value->const_val))
        SSA_NAME_VALUE (name) = value->const_val;
    }
}

/* Free allocated storage.  */

static void
ccp_finalize (void)
{
  /* Perform substitutions based on the known constant values.  */
  substitute_and_fold ();

  /* Now cleanup any unreachable code.  */
  cleanup_tree_cfg ();

  free (value_vector);
}

/* Compute the meet operator between VAL1 and VAL2:

                any M UNDEFINED   = any
                any M VARYING     = VARYING
                any M UNKNOWN_VAL = UNKNOWN_VAL
                Ci  M Cj          = Ci      if (i == j)
                Ci  M Cj          = VARYING if (i != j)  */

static value
ccp_lattice_meet (value val1, value val2)
{
  value result;

  /* any M UNDEFINED = any.  */
  if (val1.lattice_val == UNDEFINED)
    return val2;
  else if (val2.lattice_val == UNDEFINED)
    return val1;

  /* any M VARYING = VARYING.  */
  if (val1.lattice_val == VARYING || val2.lattice_val == VARYING)
    {
      result.lattice_val = VARYING;
      result.const_val = NULL_TREE;
      return result;
    }

  /* any M UNKNOWN_VAL = UNKNOWN_VAL.  */
  if (val1.lattice_val == UNKNOWN_VAL
      || val2.lattice_val == UNKNOWN_VAL)
    {
      result.lattice_val = UNKNOWN_VAL;
      result.const_val = NULL_TREE;
      return result;
    }

  /* Ci M Cj = Ci      if (i == j)
     Ci M Cj = VARYING if (i != j)  */
  if (simple_cst_equal (val1.const_val, val2.const_val) == 1)
    {
      result.lattice_val = CONSTANT;
      result.const_val = val1.const_val;
    }
  else
    {
      result.lattice_val = VARYING;
      result.const_val = NULL_TREE;
    }

  return result;
}
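
/* Worked examples of the meet operator (values are illustrative):

     CONSTANT 4  M  UNDEFINED    = CONSTANT 4
     CONSTANT 4  M  CONSTANT 4   = CONSTANT 4
     CONSTANT 4  M  CONSTANT 5   = VARYING
     CONSTANT 4  M  UNKNOWN_VAL  = UNKNOWN_VAL
     anything    M  VARYING      = VARYING  */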

/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet() with all the
   arguments of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (tree phi)
{
  value new_val, *old_val;
  int i;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_generic_expr (dump_file, phi, dump_flags);
    }

  old_val = get_value (PHI_RESULT (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_NOT_INTERESTING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNKNOWN_VAL:
      /* To avoid the default value of UNKNOWN_VAL overriding
         that of its possible constant arguments, temporarily
         set the PHI node's default lattice value to be
         UNDEFINED.  If the PHI node's old value was UNKNOWN_VAL and
         the new value is UNDEFINED, then we prevent the invalid
         transition by not calling set_lattice_value.  */
      new_val.lattice_val = UNDEFINED;
      new_val.const_val = NULL_TREE;
      break;

    case UNDEFINED:
    case UNINITIALIZED:
      new_val.lattice_val = UNDEFINED;
      new_val.const_val = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments.  */
      edge e = PHI_ARG_EDGE (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file,
                   "\n    Argument #%d (%d -> %d %sexecutable)\n",
                   i, e->src->index, e->dest->index,
                   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
        }

      /* If the incoming edge is executable, compute the meet operator for
         the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
        {
          tree rdef = PHI_ARG_DEF (phi, i);
          value *rdef_val, val;

          if (is_gimple_min_invariant (rdef))
            {
              val.lattice_val = CONSTANT;
              val.const_val = rdef;
              rdef_val = &val;
            }
          else
            rdef_val = get_value (rdef);

          new_val = ccp_lattice_meet (new_val, *rdef_val);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "\t");
              print_generic_expr (dump_file, rdef, dump_flags);
              dump_lattice_value (dump_file, "\tValue: ", *rdef_val);
              fprintf (dump_file, "\n");
            }

          if (new_val.lattice_val == VARYING)
            break;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Check for an invalid change from UNKNOWN_VAL to UNDEFINED.  */
  if (old_val->lattice_val == UNKNOWN_VAL
      && new_val.lattice_val == UNDEFINED)
    return SSA_PROP_NOT_INTERESTING;

  /* Otherwise, make the transition to the new value.  */
  if (set_lattice_value (PHI_RESULT (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
        return SSA_PROP_VARYING;
      else
        return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
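
/* Example (illustrative): for 'x_4 = PHI <x_2(bb3), x_3(bb4)>' where
   only the edge from bb3 has been marked executable and x_2 is
   CONSTANT 7, the meet over executable edges yields CONSTANT 7; if
   the bb4 edge later becomes executable with x_3 == CONSTANT 9, the
   next visit meets 7 with 9 and the result drops to VARYING.  */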

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS.  */

static tree
ccp_fold (tree stmt)
{
  tree rhs = get_rhs (stmt);
  enum tree_code code = TREE_CODE (rhs);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree retval = NULL_TREE;
  vuse_optype vuses;

  vuses = STMT_VUSE_OPS (stmt);

  /* If the RHS is just a variable, then that variable must now have
     a constant value that we can return directly.  */
  if (TREE_CODE (rhs) == SSA_NAME)
    return get_value (rhs)->const_val;
  else if (DECL_P (rhs)
           && NUM_VUSES (vuses) == 1
           && rhs == SSA_NAME_VAR (VUSE_OP (vuses, 0)))
    return get_value (VUSE_OP (vuses, 0))->const_val;

  /* Unary operators.  Note that we know the single operand must
     be a constant.  So this should almost always return a
     simplified RHS.  */
  if (kind == tcc_unary)
    {
      /* Handle unary operators which can appear in GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);

      /* Simplify the operand down to a constant.  */
      if (TREE_CODE (op0) == SSA_NAME)
        {
          value *val = get_value (op0);
          if (val->lattice_val == CONSTANT)
            op0 = get_value (op0)->const_val;
        }

      retval = nondestructive_fold_unary_to_constant (code,
                                                      TREE_TYPE (rhs),
                                                      op0);

      /* If we folded, but did not create an invariant, then we can not
         use this expression.  */
      if (retval && ! is_gimple_min_invariant (retval))
        return NULL;

      /* If we could not fold the expression, but the arguments are all
         constants and gimple values, then build and return the new
         expression.

         In some cases the new expression is still something we can
         use as a replacement for an argument.  This happens with
         NOP conversions of types for example.

         In other cases the new expression can not be used as a
         replacement for an argument (as it would create non-gimple
         code).  But the new expression can still be used to derive
         other constants.  */
      if (! retval && is_gimple_min_invariant (op0))
        return build1 (code, TREE_TYPE (rhs), op0);
    }

  /* Binary and comparison operators.  We know one or both of the
     operands are constants.  */
  else if (kind == tcc_binary
           || kind == tcc_comparison
           || code == TRUTH_AND_EXPR
           || code == TRUTH_OR_EXPR
           || code == TRUTH_XOR_EXPR)
    {
      /* Handle binary and comparison operators that can appear in
         GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);
      tree op1 = TREE_OPERAND (rhs, 1);

      /* Simplify the operands down to constants when appropriate.  */
      if (TREE_CODE (op0) == SSA_NAME)
        {
          value *val = get_value (op0);
          if (val->lattice_val == CONSTANT)
            op0 = val->const_val;
        }

      if (TREE_CODE (op1) == SSA_NAME)
        {
          value *val = get_value (op1);
          if (val->lattice_val == CONSTANT)
            op1 = val->const_val;
        }

      retval = nondestructive_fold_binary_to_constant (code,
                                                       TREE_TYPE (rhs),
                                                       op0, op1);

      /* If we folded, but did not create an invariant, then we can not
         use this expression.  */
      if (retval && ! is_gimple_min_invariant (retval))
        return NULL;

      /* If we could not fold the expression, but the arguments are all
         constants and gimple values, then build and return the new
         expression.

         In some cases the new expression is still something we can
         use as a replacement for an argument.  This happens with
         NOP conversions of types for example.

         In other cases the new expression can not be used as a
         replacement for an argument (as it would create non-gimple
         code).  But the new expression can still be used to derive
         other constants.  */
      if (! retval
          && is_gimple_min_invariant (op0)
          && is_gimple_min_invariant (op1))
        return build (code, TREE_TYPE (rhs), op0, op1);
    }

  /* We may be able to fold away calls to builtin functions if their
     arguments are constants.  */
  else if (code == CALL_EXPR
           && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR
           && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0))
               == FUNCTION_DECL)
           && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)))
    {
      use_optype uses = STMT_USE_OPS (stmt);
      if (NUM_USES (uses) != 0)
        {
          tree *orig;
          size_t i;

          /* Preserve the original values of every operand.  */
          orig = xmalloc (sizeof (tree) * NUM_USES (uses));
          for (i = 0; i < NUM_USES (uses); i++)
            orig[i] = USE_OP (uses, i);

          /* Substitute operands with their values and try to fold.  */
          replace_uses_in (stmt, NULL);
          retval = fold_builtin (rhs, false);

          /* Restore operands to their original form.  */
          for (i = 0; i < NUM_USES (uses); i++)
            SET_USE_OP (uses, i, orig[i]);
          free (orig);
        }
    }

  /* If we got a simplified form, see if we need to convert its type.  */
  if (retval)
    return fold_convert (TREE_TYPE (rhs), retval);

  /* No simplification was possible.  */
  return rhs;
}

/* Evaluate statement STMT.  */

static value
evaluate_stmt (tree stmt)
{
  value val;
  tree simplified;
  latticevalue likelyvalue = likely_value (stmt);

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    simplified = get_rhs (stmt);
  /* Otherwise the statement is likely to have an UNDEFINED value and
     there will be nothing to do.  */
  else
    simplified = NULL_TREE;

  if (simplified && is_gimple_min_invariant (simplified))
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.const_val = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
         had undefined or virtual operands, then the result of the
         statement should be undefined or virtual respectively.
         Else the result of the statement is VARYING.  */
      val.lattice_val = (likelyvalue == UNDEFINED ? UNDEFINED : VARYING);
      val.lattice_val = (likelyvalue == UNKNOWN_VAL
                         ? UNKNOWN_VAL : val.lattice_val);
      val.const_val = NULL_TREE;
    }

  return val;
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  */

static enum ssa_prop_result
visit_assignment (tree stmt, tree *output_p)
{
  value val;
  tree lhs, rhs;
  vuse_optype vuses;
  v_must_def_optype v_must_defs;

  lhs = TREE_OPERAND (stmt, 0);
  rhs = TREE_OPERAND (stmt, 1);
  vuses = STMT_VUSE_OPS (stmt);
  v_must_defs = STMT_V_MUST_DEF_OPS (stmt);

  gcc_assert (NUM_V_MAY_DEFS (STMT_V_MAY_DEF_OPS (stmt)) == 0);
  gcc_assert (NUM_V_MUST_DEFS (v_must_defs) == 1
              || TREE_CODE (lhs) == SSA_NAME);

  /* We require the SSA version number of the lhs for the value_vector.
     Make sure we have it.  */
  if (TREE_CODE (lhs) != SSA_NAME)
    {
      /* If we make it here, then stmt only has one definition:
         a V_MUST_DEF.  */
      lhs = V_MUST_DEF_OP (v_must_defs, 0);
    }

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      /* For a simple copy operation, we copy the lattice values.  */
      value *nval = get_value (rhs);
      val = *nval;
    }
  else if (DECL_P (rhs)
           && NUM_VUSES (vuses) == 1
           && rhs == SSA_NAME_VAR (VUSE_OP (vuses, 0)))
    {
      /* Same as above, but the rhs is not a gimple register and yet
         has a known VUSE.  */
      value *nval = get_value (VUSE_OP (vuses, 0));
      val = *nval;
    }
  else
    /* Evaluate the statement.  */
    val = evaluate_stmt (stmt);

  /* FIXME: Hack.  If this was a definition of a bitfield, we need to widen
     the constant value into the type of the destination variable.  This
     should not be necessary if GCC represented bitfields properly.  */
  {
    tree lhs = TREE_OPERAND (stmt, 0);
    if (val.lattice_val == CONSTANT
        && TREE_CODE (lhs) == COMPONENT_REF
        && DECL_BIT_FIELD (TREE_OPERAND (lhs, 1)))
      {
        tree w = widen_bitfield (val.const_val, TREE_OPERAND (lhs, 1), lhs);

        if (w && is_gimple_min_invariant (w))
          val.const_val = w;
        else
          {
            val.lattice_val = VARYING;
            val.const_val = NULL;
          }
      }
  }

  /* If LHS is not a gimple register, then it cannot take on an
     UNDEFINED value.  */
  if (!is_gimple_reg (SSA_NAME_VAR (lhs))
      && val.lattice_val == UNDEFINED)
    val.lattice_val = UNKNOWN_VAL;

  /* Set the lattice value of the statement's output.  */
  if (set_lattice_value (lhs, val))
    {
      *output_p = lhs;
      if (val.lattice_val == VARYING)
        return SSA_PROP_VARYING;
      else
        return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (tree stmt, edge *taken_edge_p)
{
  value val;
  basic_block block;

  block = bb_for_stmt (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = find_taken_edge (block, val.const_val);
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}

/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (tree stmt, edge *taken_edge_p, tree *output_p)
{
  stmt_ann_t ann;
  v_may_def_optype v_may_defs;
  v_must_def_optype v_must_defs;
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement: ");
      print_generic_stmt (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  ann = stmt_ann (stmt);

  v_must_defs = V_MUST_DEF_OPS (ann);
  v_may_defs = V_MAY_DEF_OPS (ann);
  if (TREE_CODE (stmt) == MODIFY_EXPR
      && NUM_V_MAY_DEFS (v_may_defs) == 0
      && (NUM_V_MUST_DEFS (v_must_defs) == 1
          || TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME))
    {
      /* If the statement is an assignment that produces a single
         output value, evaluate its RHS to see if the lattice value of
         its output has changed.  */
      return visit_assignment (stmt, output_p);
    }
  else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR)
    {
      /* If STMT is a conditional branch, see if we can determine
         which branch will be taken.  */
      return visit_cond_stmt (stmt, taken_edge_p);
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
    def_to_varying (def);

  /* Mark all V_MAY_DEF operands VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_VMAYDEF)
    def_to_varying (def);

  return SSA_PROP_VARYING;
}

/* Main entry point for SSA Conditional Constant Propagation.  The
   lattice is set up by ccp_initialize, the generic propagation engine
   then simulates the CFG using ccp_visit_stmt and ccp_visit_phi_node
   as callbacks, and ccp_finalize substitutes the discovered constants
   and folds the affected statements.  */

static void
execute_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  ccp_finalize ();
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}


struct tree_opt_pass pass_ccp =
{
  "ccp",                                /* name */
  gate_ccp,                             /* gate */
  execute_ssa_ccp,                      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CCP,                          /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_rename_vars
    | TODO_ggc_collect | TODO_verify_ssa
    | TODO_verify_stmts,                /* todo_flags_finish */
  0                                     /* letter */
};

/* Given a constant value VAL for bitfield FIELD, and a destination
   variable VAR, return VAL appropriately widened to fit into VAR.  If
   FIELD is wider than HOST_WIDE_INT, NULL is returned.  */

tree
widen_bitfield (tree val, tree field, tree var)
{
  unsigned HOST_WIDE_INT var_size, field_size;
  tree wide_val;
  unsigned HOST_WIDE_INT mask;
  unsigned int i;

  /* We can only do this if the size of the type and field and VAL are
     all constants representable in HOST_WIDE_INT.  */
  if (!host_integerp (TYPE_SIZE (TREE_TYPE (var)), 1)
      || !host_integerp (DECL_SIZE (field), 1)
      || !host_integerp (val, 0))
    return NULL_TREE;

  var_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1);
  field_size = tree_low_cst (DECL_SIZE (field), 1);

  /* Give up if either the bitfield or the variable are too wide.  */
  if (field_size > HOST_BITS_PER_WIDE_INT || var_size > HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;

  gcc_assert (var_size >= field_size);

  /* If the sign bit of the value is not set or the field's type is unsigned,
     just mask off the high order bits of the value.  */
  if (DECL_UNSIGNED (field)
      || !(tree_low_cst (val, 0) & (((HOST_WIDE_INT) 1) << (field_size - 1))))
    {
      /* Zero extension.  Build a mask with the lower 'field_size' bits
         set and a BIT_AND_EXPR node to clear the high order bits of
         the value.  */
      for (i = 0, mask = 0; i < field_size; i++)
        mask |= ((HOST_WIDE_INT) 1) << i;

      wide_val = build (BIT_AND_EXPR, TREE_TYPE (var), val,
                        fold_convert (TREE_TYPE (var),
                                      build_int_cst (NULL_TREE, mask)));
    }
  else
    {
      /* Sign extension.  Create a mask with the upper 'field_size'
         bits set and a BIT_IOR_EXPR to set the high order bits of the
         value.  */
      for (i = 0, mask = 0; i < (var_size - field_size); i++)
        mask |= ((HOST_WIDE_INT) 1) << (var_size - i - 1);

      wide_val = build (BIT_IOR_EXPR, TREE_TYPE (var), val,
                        fold_convert (TREE_TYPE (var),
                                      build_int_cst (NULL_TREE, mask)));
    }

  return fold (wide_val);
}
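
/* Worked example (illustrative): for a signed 4-bit field assigned
   the value -3 (bit pattern 1101) stored into a 32-bit variable, the
   sign bit (1 << 3) is set, so the sign-extension branch builds the
   mask 0xfffffff0 and returns VAL | 0xfffffff0, i.e. -3 as a 32-bit
   constant.  For an unsigned 4-bit field the mask would instead be
   0xf and the result VAL & 0xf.  */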

/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type)
{
  tree min_idx, idx, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of elements type) from that ARRAY_REF).
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
          || TREE_CODE (elt_offset) != INTEGER_CST)
        return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!lang_hooks.types_compatible_p (orig_type, elt_type))
    return NULL_TREE;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
        elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = integer_zero_node;
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;

      if (TREE_CODE (elt_size) != INTEGER_CST
          || div_and_round_double (TRUNC_DIV_EXPR, 1,
                                   TREE_INT_CST_LOW (offset),
                                   TREE_INT_CST_HIGH (offset),
                                   TREE_INT_CST_LOW (elt_size),
                                   TREE_INT_CST_HIGH (elt_size),
                                   &lquo, &hquo, &lrem, &hrem)
          || lrem || hrem)
        return NULL_TREE;

      idx = build_int_cst_wide (NULL_TREE, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = integer_zero_node;
  if (TYPE_DOMAIN (array_type))
    {
      if (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
        min_idx = TYPE_MIN_VALUE (TYPE_DOMAIN (array_type));
      else
        min_idx = fold_convert (TYPE_DOMAIN (array_type), min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
        return NULL_TREE;

      idx = fold_convert (TYPE_DOMAIN (array_type), idx);
      elt_offset = fold_convert (TYPE_DOMAIN (array_type), elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  return build (ARRAY_REF, orig_type, base, idx, min_idx,
                size_int (tree_low_cst (elt_size, 1)
                          / (TYPE_ALIGN_UNIT (elt_type))));
}

/* A subroutine of fold_stmt_r.  Attempts to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
/* ??? This doesn't handle class inheritance.  */

static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
                                    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field, field_offset;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (lang_hooks.types_compatible_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
        continue;
      if (DECL_BIT_FIELD (f))
        continue;

      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
        continue;

      /* ??? Java creates "interesting" fields for representing base classes.
         They have no name, and have no context.  With no context, we get into
         trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
        continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
        continue;

      field_type = TREE_TYPE (f);
      if (cmp < 0)
        {
          /* Don't care about offsets into the middle of scalars.  */
          if (!AGGREGATE_TYPE_P (field_type))
            continue;

          /* Check for array at the end of the struct.  This is often
             used for flexible array members.  We should be able to
             turn this into an array access anyway.  */
          if (TREE_CODE (field_type) == ARRAY_TYPE)
            tail_array_field = f;

          /* Check the end of the field against the offset.  */
          if (!DECL_SIZE_UNIT (f)
              || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
            continue;
          t = int_const_binop (MINUS_EXPR, offset, DECL_FIELD_OFFSET (f), 1);
          if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
            continue;

          /* If we matched, then set offset to the displacement into
             this field.  */
          offset = t;
          goto found;
        }

      /* Here we exactly match the offset being checked.  If the types match,
         then we can return that field.  */
      else if (lang_hooks.types_compatible_p (orig_type, field_type))
        {
          if (base_is_ptr)
            base = build1 (INDIRECT_REF, record_type, base);
          t = build (COMPONENT_REF, field_type, base, f, NULL_TREE);
          return t;
        }

      /* Don't care about type-punning of scalars.  */
      else if (!AGGREGATE_TYPE_P (field_type))
        return NULL_TREE;

      goto found;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);

 found:
  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type);
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
                                             orig_type, false);
}
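
/* Example (illustrative): given 'struct s { int i; int j; } x;' on a
   target where int is 4 bytes, folding *(&x + 4) with ORIG_TYPE
   'int' scans the fields, finds that 'j' sits exactly at byte offset
   4 with a compatible type, and returns the COMPONENT_REF x.j.  */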

/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a PLUS_EXPR.  */
  if (TREE_CODE (base) == PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = int_const_binop (PLUS_EXPR, offset, offset2, 1);
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
          && is_gimple_min_invariant (DECL_INITIAL (base)))
        return DECL_INITIAL (base);

      /* Try folding *(&B+O) to B[X].  */
      t = maybe_fold_offset_to_array_ref (base, offset, TREE_TYPE (expr));
      if (t)
        return t;

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_component_ref (TREE_TYPE (base), base, offset,
                                              TREE_TYPE (expr), false);
      if (t)
        return t;

      /* Fold *&B to B.  We can only do this if EXPR is the same type
         as BASE.  We can't do this if EXPR is the element type of an array
         and BASE is the array.  */
      if (integer_zerop (offset)
          && lang_hooks.types_compatible_p (TREE_TYPE (base),
                                            TREE_TYPE (expr)))
        return base;
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
         such as "_"[3].  Bail out of the entire substitution search
         and arrange for the entire statement to be replaced by a
         call to __builtin_trap.  In all likelihood this will all be
         constant-folded away, but in the meantime we can't leave with
         something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
        {
          /* FIXME: Except that this causes problems elsewhere with dead
             code not being deleted, and we abort in the rtl expanders
             because we failed to remove some ssa_name.  In the meantime,
             just return zero.  */
          /* FIXME2: This condition should be signaled by
             fold_read_from_constant_string directly, rather than
             re-checking for it here.  */
          return integer_zero_node;
        }

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
        {
          t = maybe_fold_offset_to_component_ref (TREE_TYPE (TREE_TYPE (base)),
                                                  base, offset,
                                                  TREE_TYPE (expr), true);
          if (t)
            return t;
        }
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}

/* A subroutine of fold_stmt_r.  EXPR is a PLUS_EXPR.

   A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
        (T *)(&array + const)
   where the cast doesn't actually exist, but is implicit in the
   type of the PLUS_EXPR.  We'd like to turn this into
        &array[x]
   which may be able to propagate further.  */

static tree
maybe_fold_stmt_addition (tree expr)
{
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  tree ptr_type = TREE_TYPE (expr);
  tree ptd_type;
  tree t;
  bool subtract = (TREE_CODE (expr) == MINUS_EXPR);

  /* We're only interested in pointer arithmetic.  */
  if (!POINTER_TYPE_P (ptr_type))
    return NULL_TREE;
  /* Canonicalize the integral operand to op1.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (op0)))
    {
      if (subtract)
        return NULL_TREE;
      t = op0, op0 = op1, op1 = t;
    }
  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    return NULL_TREE;
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
        break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
        break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
        {
          min_idx = TYPE_MIN_VALUE (min_idx);
          if (min_idx)
            {
              if (TREE_CODE (min_idx) != INTEGER_CST)
                break;

              array_idx = convert (TREE_TYPE (min_idx), array_idx);
              if (!integer_zerop (min_idx))
                array_idx = int_const_binop (MINUS_EXPR, array_idx,
                                             min_idx, 0);
            }
        }

      /* Convert the index to a byte offset.  */
      array_idx = convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      /* If we're manipulating unsigned types, then folding into negative
         values can produce incorrect results.  Particularly if the type
         is smaller than the width of the pointer.  */
      if (subtract
          && TYPE_UNSIGNED (TREE_TYPE (op1))
          && tree_int_cst_lt (array_idx, op1))
        break;
      op1 = int_const_binop (subtract ? MINUS_EXPR : PLUS_EXPR,
                             array_idx, op1, 0);
      subtract = false;
      op0 = array_obj;
    }

  /* If we weren't able to fold the subtraction into another array reference,
     canonicalize the integer for passing to the array and component ref
     simplification functions.  */
  if (subtract)
    {
      if (TYPE_UNSIGNED (TREE_TYPE (op1)))
        return NULL_TREE;
      op1 = fold (build1 (NEGATE_EXPR, TREE_TYPE (op1), op1));
      /* ??? In theory fold should always produce another integer.  */
      if (TREE_CODE (op1) != INTEGER_CST)
        return NULL_TREE;
    }

  ptd_type = TREE_TYPE (ptr_type);

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
                                            ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, ptr_type, t);

  return t;
}
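
/* Example (illustrative): for 'char buf[8]', the expression
   (char *)&buf[2] + 3 first expands the ARRAY_REF into a byte offset
   (2 * 1), accumulates the constant to 5, and then rebuilds the
   result as &buf[5], which later substitutions may fold further.  */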

/* Subroutine of fold_stmt called via walk_tree.  We perform several
   simplifications of EXPR_P, mostly having to do with pointer arithmetic.  */

static tree
fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
{
  bool *changed_p = data;
  tree expr = *expr_p, t;

  /* ??? It'd be nice if walk_tree had a pre-order option.  */
  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
                                    integer_zero_node);
      break;

      /* ??? Could handle ARRAY_REF here, as a variant of INDIRECT_REF.
         We'd only want to bother decomposing an existing ARRAY_REF if
         the base array is found to have another offset contained within.
         Otherwise we'd be wasting time.  */

    case ADDR_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Set TREE_INVARIANT properly so that the value is properly
         considered constant, and so gets propagated as expected.  */
      if (*changed_p)
        recompute_tree_invarant_for_addr_expr (expr);
      return NULL_TREE;

    case PLUS_EXPR:
    case MINUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (expr);
      break;

    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
         We've already checked that the records are compatible, so we should
         come up with a set of compatible fields.  */
      {
        tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
        tree expr_field = TREE_OPERAND (expr, 1);

        if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
          {
            expr_field = find_compatible_field (expr_record, expr_field);
            TREE_OPERAND (expr, 1) = expr_field;
          }
      }
      break;

    default:
      return NULL_TREE;
    }

  if (t)
    {
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}

/* Return the string length of ARG in LENGTH.  If ARG is an SSA name variable,
   follow its use-def chains.  If LENGTH is not NULL and its value is not
   equal to the length we determine, or if we are unable to determine the
   length, return false.  VISITED is a bitmap of visited variables.  */

static bool
get_strlen (tree arg, tree *length, bitmap visited)
{
  tree var, def_stmt, val;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length && simple_cst_equal (val, *length) != 1)
        return false;

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (TREE_CODE (def_stmt))
    {
      case MODIFY_EXPR:
        {
          tree len, rhs;

          /* The RHS of the statement defining VAR must either have a
             constant length or come from another SSA_NAME with a constant
             length.  */
          rhs = TREE_OPERAND (def_stmt, 1);
          STRIP_NOPS (rhs);
          if (TREE_CODE (rhs) == SSA_NAME)
            return get_strlen (rhs, length, visited);

          /* See if the RHS is a constant length.  */
          len = c_strlen (rhs, 1);
          if (len)
            {
              if (*length && simple_cst_equal (len, *length) != 1)
                return false;

              *length = len;
              return true;
            }

          break;
        }

      case PHI_NODE:
        {
          /* All the arguments of the PHI node must have the same constant
             length.  */
          int i;

          for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
            {
              tree arg = PHI_ARG_DEF (def_stmt, i);

              /* If this PHI has itself as an argument, we cannot
                 determine the string length of this argument.  However,
                 if we can find a constant string length for the other
                 PHI args then we can still be sure that this is a
                 constant string length.  So be optimistic and just
                 continue with the next argument.  */
              if (arg == PHI_RESULT (def_stmt))
                continue;

              if (!get_strlen (arg, length, visited))
                return false;
            }

          return true;
        }

      default:
        break;
    }

  return false;
}
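
/* Example (illustrative): for

       s_1 = "hello";
       s_2 = "world";
       s_3 = PHI <s_1, s_2>;

   both PHI arguments have constant length 5, so get_strlen returns
   true with *LENGTH == 5; if one argument were "hi" the lengths
   would disagree and the function would return false.  */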

/* Fold builtin call FN in statement STMT.  If it cannot be folded into a
   constant, return NULL_TREE.  Otherwise, return its constant value.  */

static tree
ccp_fold_builtin (tree stmt, tree fn)
{
  tree result, strlen_val[2];
  tree callee, arglist, a;
  int strlen_arg, i;
  bitmap visited;
  bool ignore;

  ignore = TREE_CODE (stmt) != MODIFY_EXPR;

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_builtin (fn, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = get_callee_fndecl (fn);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  arglist = TREE_OPERAND (fn, 1);
  if (!arglist)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      strlen_arg = 1;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      strlen_arg = 2;
      break;
    default:
      return NULL_TREE;
    }

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_XMALLOC ();

  memset (strlen_val, 0, sizeof (strlen_val));
  for (i = 0, a = arglist;
       strlen_arg;
       i++, strlen_arg >>= 1, a = TREE_CHAIN (a))
    if (strlen_arg & 1)
      {
        bitmap_clear (visited);
        if (!get_strlen (TREE_VALUE (a), &strlen_val[i], visited))
          strlen_val[i] = NULL_TREE;
      }

  BITMAP_XFREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (strlen_val[0])
        {
          tree new = fold_convert (TREE_TYPE (fn), strlen_val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we can not use the result.  */
          if (is_gimple_val (new)
              || (is_gimple_cast (new)
                  && is_gimple_val (TREE_OPERAND (new, 0))))
            return new;
        }
      break;

    case BUILT_IN_STRCPY:
      if (strlen_val[1] && is_gimple_val (strlen_val[1]))
        result = fold_builtin_strcpy (fn, strlen_val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (strlen_val[1] && is_gimple_val (strlen_val[1]))
        result = fold_builtin_strncpy (fn, strlen_val[1]);
      break;

    case BUILT_IN_FPUTS:
      result = fold_builtin_fputs (arglist,
                                   TREE_CODE (stmt) != MODIFY_EXPR, 0,
                                   strlen_val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      result = fold_builtin_fputs (arglist,
                                   TREE_CODE (stmt) != MODIFY_EXPR, 1,
                                   strlen_val[0]);
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
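
/* Example (illustrative): in

       p_1 = PHI <"abc", "xyz">;
       n_2 = strlen (p_1);

   the generic folder cannot see through the PHI, but get_strlen
   proves every reaching string has length 3, so the call is replaced
   by the constant 3 (after checking that the converted result is a
   valid GIMPLE value).  */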

/* Fold the statement pointed by STMT_P.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */

bool
fold_stmt (tree *stmt_p)
{
  tree rhs, result, stmt;
  bool changed = false;

  stmt = *stmt_p;

  /* If we replaced constants and the statement makes pointer dereferences,
     then we may need to fold instances of *&VAR into VAR, etc.  */
  if (walk_tree (stmt_p, fold_stmt_r, &changed, NULL))
    {
      *stmt_p
        = build_function_call_expr (implicit_built_in_decls[BUILT_IN_TRAP],
                                    NULL);
      return true;
    }

  rhs = get_rhs (stmt);
  if (!rhs)
    return changed;
  result = NULL_TREE;

  if (TREE_CODE (rhs) == CALL_EXPR)
    {
      tree callee;

      /* Check for builtins that CCP can handle using information not
         available in the generic fold routines.  */
      callee = get_callee_fndecl (rhs);
      if (callee && DECL_BUILT_IN (callee))
        result = ccp_fold_builtin (stmt, rhs);
      else
        {
          /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
             here are when we've propagated the address of a decl into the
             object slot.  */
          /* ??? Should perhaps do this in fold proper.  However, doing it
             there requires that we create a new CALL_EXPR, and that requires
             copying EH region info to the new node.  Easier to just do it
             here where we can just smash the call operand.  */
          callee = TREE_OPERAND (rhs, 0);
          if (TREE_CODE (callee) == OBJ_TYPE_REF
              && lang_hooks.fold_obj_type_ref
              && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
              && DECL_P (TREE_OPERAND
                         (OBJ_TYPE_REF_OBJECT (callee), 0)))
            {
              tree t;

              /* ??? Caution: Broken ADDR_EXPR semantics means that
                 looking at the type of the operand of the addr_expr
                 can yield an array type.  See silly exception in
                 check_pointer_types_r.  */

              t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
              t = lang_hooks.fold_obj_type_ref (callee, t);
              if (t)
                {
                  TREE_OPERAND (rhs, 0) = t;
                  changed = true;
                }
            }
        }
    }

  /* If we couldn't fold the RHS, hand over to the generic fold routines.  */
  if (result == NULL_TREE)
    result = fold (rhs);

  /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR that
     may have been added by fold, and "useless" type conversions that might
     now be apparent due to propagation.  */
  STRIP_USELESS_TYPE_CONVERSION (result);

  if (result != rhs)
    changed |= set_rhs (stmt_p, result);

  return changed;
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  */

static tree
convert_to_gimple_builtin (block_stmt_iterator *si_p, tree expr)
{
  tree_stmt_iterator ti;
  tree stmt = bsi_stmt (*si_p);
  tree tmp, stmts = NULL;

  push_gimplify_context ();
  tmp = get_initialized_tmp_var (expr, &stmts, NULL);
  pop_gimplify_context (NULL);

  /* The replacement can expose previously unreferenced variables.  */
  for (ti = tsi_start (stmts); !tsi_end_p (ti); tsi_next (&ti))
    {
      find_new_referenced_vars (tsi_stmt_ptr (ti));
      mark_new_vars_to_rename (tsi_stmt (ti), vars_to_rename);
    }

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&stmts, EXPR_LOCATION (stmt));

  bsi_insert_before (si_p, stmts, BSI_SAME_STMT);

  return tmp;
}

/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static void
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          tree *stmtp = bsi_stmt_ptr (i);
          tree call = get_rhs (*stmtp);
          tree callee, result;

          if (!call || TREE_CODE (call) != CALL_EXPR)
            continue;
          callee = get_callee_fndecl (call);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            continue;

          result = ccp_fold_builtin (*stmtp, call);
          if (!result)
            switch (DECL_FUNCTION_CODE (callee))
              {
              case BUILT_IN_CONSTANT_P:
                /* Resolve __builtin_constant_p.  If it hasn't been
                   folded to integer_one_node by now, it's fairly
                   certain that the value simply isn't constant.  */
                result = integer_zero_node;
                break;

              default:
                continue;
              }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_generic_stmt (dump_file, *stmtp, dump_flags);
            }

          if (!set_rhs (stmtp, result))
            {
              result = convert_to_gimple_builtin (&i, result);
              if (result && !set_rhs (stmtp, result))
                abort ();
            }
          modify_stmt (*stmtp);
          if (maybe_clean_eh_stmt (*stmtp)
              && tree_purge_dead_eh_edges (bb))
            cfg_changed = true;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_generic_stmt (dump_file, *stmtp, dump_flags);
              fprintf (dump_file, "\n");
            }
        }
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    cleanup_tree_cfg ();
}


struct tree_opt_pass pass_fold_builtins =
{
  "fab",                                /* name */
  NULL,                                 /* gate */
  execute_fold_all_builtins,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_rename_vars,                 /* todo_flags_finish */
  0                                     /* letter */
};