/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* Conditional constant propagation.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
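
/* A small example of what this pass accomplishes (illustrative only,
   not taken from the references above).  Given:

	a = 2;
	if (a > 1)
	  b = 3;
	else
	  b = 5;

   the condition folds to true once 'a' is known to be 2, so the edge
   into the else arm is never marked executable.  The PHI node merging
   the two definitions of 'b' therefore only meets the value 3 coming
   in over the executable edge, and 'b' itself propagates as the
   constant 3.  An unconditional constant propagator would have to
   give up at the merge point.  */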
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED = 0,
  UNDEFINED,
  UNKNOWN_VAL,
  CONSTANT,
  VARYING
} latticevalue;

/* Main structure for CCP.  Contains the lattice value and, if it's a
   constant, the constant value.  */
typedef struct
{
  latticevalue lattice_val;
  tree const_val;
} value;

/* This is used to track the current value of each variable.  */
static value *value_vector;
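
/* Note on the lattice (illustrative): during propagation a value only
   ever moves in the direction

	UNDEFINED -> CONSTANT -> VARYING

   with UNKNOWN_VAL acting as the non-constant counterpart of CONSTANT
   for stores to memory.  set_lattice_value below asserts that no value
   ever moves back up.  */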
/* Dump lattice value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, value val)
{
  switch (val.lattice_val)
    {
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case UNKNOWN_VAL:
      fprintf (outf, "%sUNKNOWN_VAL", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.const_val, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}
/* Return a default value for variable VAR using the following rules:

   1- Function arguments are considered VARYING.

   2- Global and static variables that are declared constant are
      considered CONSTANT.

   3- Any other virtually defined variable is considered UNKNOWN_VAL.

   4- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.  */
static value
get_default_value (tree var)
{
  value val;
  tree sym;

  if (TREE_CODE (var) == SSA_NAME)
    sym = SSA_NAME_VAR (var);
  else
    {
      gcc_assert (DECL_P (var));
      sym = var;
    }

  val.lattice_val = UNDEFINED;
  val.const_val = NULL_TREE;

  if (TREE_CODE (sym) == PARM_DECL || TREE_THIS_VOLATILE (sym))
    {
      /* Function arguments and volatile variables are considered VARYING.  */
      val.lattice_val = VARYING;
    }
  else if (TREE_STATIC (sym))
    {
      /* Globals and static variables are considered UNKNOWN_VAL,
         unless they are declared 'const'.  */
      if (TREE_READONLY (sym)
          && DECL_INITIAL (sym)
          && is_gimple_min_invariant (DECL_INITIAL (sym)))
        {
          val.lattice_val = CONSTANT;
          val.const_val = DECL_INITIAL (sym);
        }
      else
        {
          val.const_val = NULL_TREE;
          val.lattice_val = UNKNOWN_VAL;
        }
    }
  else if (!is_gimple_reg (sym))
    {
      val.const_val = NULL_TREE;
      val.lattice_val = UNKNOWN_VAL;
    }
  else
    {
      enum tree_code code;
      tree stmt = SSA_NAME_DEF_STMT (var);

      if (!IS_EMPTY_STMT (stmt))
        {
          code = TREE_CODE (stmt);
          if (code != MODIFY_EXPR && code != PHI_NODE)
            val.lattice_val = VARYING;
        }
    }

  return val;
}
/* Get the constant value associated with variable VAR.  */

static value *
get_value (tree var)
{
  value *val;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  val = &value_vector[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}
/* Set the lattice value for variable VAR to VAL.  Return true if VAL
   is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, value val)
{
  value *old = get_value (var);

  if (val.lattice_val == UNDEFINED)
    {
      /* CONSTANT->UNDEFINED is never a valid state transition.  */
      gcc_assert (old->lattice_val != CONSTANT);

      /* UNKNOWN_VAL->UNDEFINED is never a valid state transition.  */
      gcc_assert (old->lattice_val != UNKNOWN_VAL);

      /* VARYING->UNDEFINED is generally not a valid state transition,
         except for values which are initialized to VARYING.  */
      gcc_assert (old->lattice_val != VARYING
                  || get_default_value (var).lattice_val == VARYING);
    }
  else if (val.lattice_val == CONSTANT)
    /* VARYING -> CONSTANT is an invalid state transition, except
       for objects which start off in a VARYING state.  */
    gcc_assert (old->lattice_val != VARYING
                || get_default_value (var).lattice_val == VARYING);

  /* If the constant for VAR has changed, then this VAR is really varying.  */
  if (old->lattice_val == CONSTANT
      && val.lattice_val == CONSTANT
      && !simple_cst_equal (old->const_val, val.const_val))
    {
      val.lattice_val = VARYING;
      val.const_val = NULL_TREE;
    }

  if (old->lattice_val != val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          dump_lattice_value (dump_file, "Lattice value changed to ", val);
          fprintf (dump_file, ".  Adding definition to SSA edges.\n");
        }

      *old = val;
      return true;
    }

  return false;
}
/* Set the lattice value for the variable VAR to VARYING.  */

static void
def_to_varying (tree var)
{
  value val;
  val.lattice_val = VARYING;
  val.const_val = NULL_TREE;
  set_lattice_value (var, val);
}
/* Return the likely latticevalue for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if any operands of STMT are undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static latticevalue
likely_value (tree stmt)
{
  vuse_optype vuses;
  int found_constant = 0;
  stmt_ann_t ann;
  tree use;
  ssa_op_iter iter;

  /* If the statement makes aliased loads or has volatile operands, it
     won't fold to a constant value.  */
  ann = stmt_ann (stmt);
  if (ann->makes_aliased_loads || ann->has_volatile_ops)
    return VARYING;

  /* A CALL_EXPR is assumed to be varying.  This may be overly conservative,
     in the presence of const and pure calls.  */
  if (get_call_expr_in (stmt) != NULL_TREE)
    return VARYING;

  get_stmt_operands (stmt);

  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      value *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
        return UNDEFINED;

      if (val->lattice_val == CONSTANT)
        found_constant = 1;
    }

  vuses = VUSE_OPS (ann);

  if (NUM_VUSES (vuses))
    {
      tree vuse = VUSE_OP (vuses, 0);
      value *val = get_value (vuse);

      if (val->lattice_val == UNKNOWN_VAL)
        return UNKNOWN_VAL;

      /* There should be no VUSE operands that are UNDEFINED.  */
      gcc_assert (val->lattice_val != UNDEFINED);

      if (val->lattice_val == CONSTANT)
        found_constant = 1;
    }

  return ((found_constant || (!USE_OPS (ann) && !vuses)) ? CONSTANT : VARYING);
}
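
/* For instance (illustrative only): given "z_5 = x_3 * y_4" where x_3
   is CONSTANT 0 and y_4 is VARYING, the scan above finds one constant
   operand and returns CONSTANT, which lets evaluate_stmt hand the RHS
   to the folder; folding can indeed yield 0 here even though the
   other operand is unknown.  */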
/* Function indicating whether we ought to include information for VAR
   when calculating immediate uses.  */

static bool
need_imm_uses_for (tree var)
{
  return get_value (var)->lattice_val != VARYING;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;
  sbitmap is_may_def;

  value_vector = (value *) xmalloc (num_ssa_names * sizeof (value));
  memset (value_vector, 0, num_ssa_names * sizeof (value));

  /* Set of SSA_NAMEs that are defined by a V_MAY_DEF.  */
  is_may_def = sbitmap_alloc (num_ssa_names);
  sbitmap_zero (is_may_def);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      /* Mark all V_MAY_DEF operands VARYING.  */
      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          bool is_varying = false;
          tree stmt = bsi_stmt (i);
          ssa_op_iter iter;
          tree def;

          get_stmt_operands (stmt);

          /* Get the default value for each DEF and V_MUST_DEF.  */
          FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter,
                                     (SSA_OP_DEF | SSA_OP_VMUSTDEF))
            {
              if (get_value (def)->lattice_val == VARYING)
                is_varying = true;
            }

          /* Mark all V_MAY_DEF operands VARYING.  */
          FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_VMAYDEF)
            {
              get_value (def)->lattice_val = VARYING;
              SET_BIT (is_may_def, SSA_NAME_VERSION (def));
            }

          /* Statements other than MODIFY_EXPR, COND_EXPR and
             SWITCH_EXPR are not interesting for constant propagation.
             Mark them VARYING.  */
          if (TREE_CODE (stmt) != MODIFY_EXPR
              && TREE_CODE (stmt) != COND_EXPR
              && TREE_CODE (stmt) != SWITCH_EXPR)
            is_varying = true;

          DONT_SIMULATE_AGAIN (stmt) = is_varying;
        }
    }

  /* Now process PHI nodes.  */
  FOR_EACH_BB (bb)
    {
      tree phi, var;
      int x;

      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
        {
          value *val = get_value (PHI_RESULT (phi));

          for (x = 0; x < PHI_NUM_ARGS (phi); x++)
            {
              var = PHI_ARG_DEF (phi, x);

              /* If one argument has a V_MAY_DEF, the result is
                 VARYING.  */
              if (TREE_CODE (var) == SSA_NAME)
                {
                  if (TEST_BIT (is_may_def, SSA_NAME_VERSION (var)))
                    {
                      val->lattice_val = VARYING;
                      SET_BIT (is_may_def,
                               SSA_NAME_VERSION (PHI_RESULT (phi)));
                      break;
                    }
                }
            }

          DONT_SIMULATE_AGAIN (phi) = (val->lattice_val == VARYING);
        }
    }

  sbitmap_free (is_may_def);

  /* Compute immediate uses for variables we care about.  */
  compute_immediate_uses (TDFA_USE_OPS | TDFA_USE_VOPS, need_imm_uses_for);
}
/* Replace USE references in statement STMT with their immediate reaching
   definition.  Return true if at least one reference was replaced.  If
   REPLACED_ADDRESSES_P is given, it will be set to true if an address
   constant was replaced.  */

static bool
replace_uses_in (tree stmt, bool *replaced_addresses_p)
{
  bool replaced = false;
  use_operand_p use;
  ssa_op_iter iter;

  if (replaced_addresses_p)
    *replaced_addresses_p = false;

  get_stmt_operands (stmt);

  FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      value *val = get_value (USE_FROM_PTR (use));

      if (val->lattice_val == CONSTANT)
        {
          SET_USE (use, val->const_val);
          replaced = true;
          if (POINTER_TYPE_P (TREE_TYPE (USE_FROM_PTR (use)))
              && replaced_addresses_p)
            *replaced_addresses_p = true;
        }
    }

  return replaced;
}
/* Replace the VUSE references in statement STMT with its immediate reaching
   definition.  Return true if the reference was replaced.  If
   REPLACED_ADDRESSES_P is given, it will be set to true if an address
   constant was replaced.  */

static bool
replace_vuse_in (tree stmt, bool *replaced_addresses_p)
{
  bool replaced = false;
  vuse_optype vuses;
  use_operand_p vuse;
  value *val;

  if (replaced_addresses_p)
    *replaced_addresses_p = false;

  get_stmt_operands (stmt);

  vuses = STMT_VUSE_OPS (stmt);

  if (NUM_VUSES (vuses) != 1)
    return false;

  vuse = VUSE_OP_PTR (vuses, 0);
  val = get_value (USE_FROM_PTR (vuse));

  if (val->lattice_val == CONSTANT
      && TREE_CODE (stmt) == MODIFY_EXPR
      && DECL_P (TREE_OPERAND (stmt, 1))
      && TREE_OPERAND (stmt, 1) == SSA_NAME_VAR (USE_FROM_PTR (vuse)))
    {
      TREE_OPERAND (stmt, 1) = val->const_val;
      replaced = true;
      if (POINTER_TYPE_P (TREE_TYPE (USE_FROM_PTR (vuse)))
          && replaced_addresses_p)
        *replaced_addresses_p = true;
    }

  return replaced;
}
/* Perform final substitution and folding.  After this pass the program
   should still be in SSA form.  */

static void
substitute_and_fold (void)
{
  basic_block bb;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file,
             "\nSubstituting constants and folding statements\n\n");

  /* Substitute constants in every statement of every basic block.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      tree phi;

      /* Propagate our known constants into PHI nodes.  */
      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
        {
          int i;

          for (i = 0; i < PHI_NUM_ARGS (phi); i++)
            {
              value *new_val;
              use_operand_p orig_p = PHI_ARG_DEF_PTR (phi, i);
              tree orig = USE_FROM_PTR (orig_p);

              if (! SSA_VAR_P (orig))
                break;

              new_val = get_value (orig);
              if (new_val->lattice_val == CONSTANT
                  && may_propagate_copy (orig, new_val->const_val))
                SET_USE (orig_p, new_val->const_val);
            }
        }

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          bool replaced_address;
          tree stmt = bsi_stmt (i);

          /* Skip statements that have been folded already.  */
          if (stmt_modified_p (stmt) || !is_exec_stmt (stmt))
            continue;

          /* Replace the statement with its folded version and mark it
             folded.  */
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Line %d: replaced ", get_lineno (stmt));
              print_generic_stmt (dump_file, stmt, TDF_SLIM);
            }

          if (replace_uses_in (stmt, &replaced_address)
              || replace_vuse_in (stmt, &replaced_address))
            {
              bool changed = fold_stmt (bsi_stmt_ptr (i));
              stmt = bsi_stmt (i);

              /* If we folded a builtin function, we'll likely
                 need to rename VDEFs.  */
              if (replaced_address || changed)
                {
                  mark_new_vars_to_rename (stmt, vars_to_rename);
                  if (maybe_clean_eh_stmt (stmt))
                    tree_purge_dead_eh_edges (bb);
                }

              modify_stmt (stmt);
            }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, " with ");
              print_generic_stmt (dump_file, stmt, TDF_SLIM);
              fprintf (dump_file, "\n");
            }
        }
    }
}
/* Free allocated storage.  */

static void
ccp_finalize (void)
{
  /* Perform substitutions based on the known constant values.  */
  substitute_and_fold ();

  /* Now cleanup any unreachable code.  */
  cleanup_tree_cfg ();

  free (value_vector);
}
/* Compute the meet operator between VAL1 and VAL2:

		any M UNDEFINED   = any
		any M VARYING     = VARYING
		any M UNKNOWN_VAL = UNKNOWN_VAL
		Ci  M Cj	  = Ci		if (i == j)
		Ci  M Cj	  = VARYING	if (i != j)  */

static value
ccp_lattice_meet (value val1, value val2)
{
  value result;

  /* any M UNDEFINED = any.  */
  if (val1.lattice_val == UNDEFINED)
    return val2;
  else if (val2.lattice_val == UNDEFINED)
    return val1;

  /* any M VARYING = VARYING.  */
  if (val1.lattice_val == VARYING || val2.lattice_val == VARYING)
    {
      result.lattice_val = VARYING;
      result.const_val = NULL_TREE;
      return result;
    }

  /* any M UNKNOWN_VAL = UNKNOWN_VAL.  */
  if (val1.lattice_val == UNKNOWN_VAL
      || val2.lattice_val == UNKNOWN_VAL)
    {
      result.lattice_val = UNKNOWN_VAL;
      result.const_val = NULL_TREE;
      return result;
    }

  /* Ci M Cj = Ci	if (i == j)
     Ci M Cj = VARYING	if (i != j)  */
  if (simple_cst_equal (val1.const_val, val2.const_val) == 1)
    {
      result.lattice_val = CONSTANT;
      result.const_val = val1.const_val;
    }
  else
    {
      result.lattice_val = VARYING;
      result.const_val = NULL_TREE;
    }

  return result;
}
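
/* Worked examples (illustrative): CONSTANT 4 M CONSTANT 4 yields
   CONSTANT 4, while CONSTANT 4 M CONSTANT 5 yields VARYING.
   UNDEFINED is the identity of the operation: meeting it with any
   value simply returns the other value.  */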
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined calling ccp_lattice_meet() with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (tree phi)
{
  value new_val, *old_val;
  int i;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_generic_expr (dump_file, phi, dump_flags);
    }

  old_val = get_value (PHI_RESULT (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_NOT_INTERESTING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNKNOWN_VAL:
      /* To avoid the default value of UNKNOWN_VAL overriding
         that of its possible constant arguments, temporarily
         set the PHI node's default lattice value to be
         UNDEFINED.  If the PHI node's old value was UNKNOWN_VAL and
         the new value is UNDEFINED, then we prevent the invalid
         transition by not calling set_lattice_value.  */
      new_val.lattice_val = UNDEFINED;
      new_val.const_val = NULL_TREE;
      break;

    case UNDEFINED:
    case UNINITIALIZED:
      new_val.lattice_val = UNDEFINED;
      new_val.const_val = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments.  */
      edge e = PHI_ARG_EDGE (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file,
                   "\n    Argument #%d (%d -> %d %sexecutable)\n",
                   i, e->src->index, e->dest->index,
                   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
        }

      /* If the incoming edge is executable, compute the meet operator for
         the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
        {
          tree rdef = PHI_ARG_DEF (phi, i);
          value *rdef_val, val;

          if (is_gimple_min_invariant (rdef))
            {
              val.lattice_val = CONSTANT;
              val.const_val = rdef;
              rdef_val = &val;
            }
          else
            rdef_val = get_value (rdef);

          new_val = ccp_lattice_meet (new_val, *rdef_val);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "\t");
              print_generic_expr (dump_file, rdef, dump_flags);
              dump_lattice_value (dump_file, "\tValue: ", *rdef_val);
              fprintf (dump_file, "\n");
            }

          if (new_val.lattice_val == VARYING)
            break;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Check for an invalid change from UNKNOWN_VAL to UNDEFINED.  */
  if (old_val->lattice_val == UNKNOWN_VAL
      && new_val.lattice_val == UNDEFINED)
    return SSA_PROP_NOT_INTERESTING;

  /* Otherwise, make the transition to the new value.  */
  if (set_lattice_value (PHI_RESULT (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
        return SSA_PROP_VARYING;
      else
        return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
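
/* Example (illustrative): for the node

	x_4 = PHI <7(2), 7(3), x_5(4)>

   with only the edges from blocks 2 and 3 marked executable, the meet
   of the two incoming constants leaves x_4 at CONSTANT 7.  Should the
   edge from block 4 later become executable with x_5 VARYING, the
   next visit drops x_4 to VARYING.  */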
/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS.  */

static tree
ccp_fold (tree stmt)
{
  tree rhs = get_rhs (stmt);
  enum tree_code code = TREE_CODE (rhs);
  int kind = TREE_CODE_CLASS (code);
  tree retval = NULL_TREE;
  vuse_optype vuses;

  vuses = STMT_VUSE_OPS (stmt);

  /* If the RHS is just a variable, then that variable must now have
     a constant value that we can return directly.  */
  if (TREE_CODE (rhs) == SSA_NAME)
    return get_value (rhs)->const_val;
  else if (DECL_P (rhs)
           && NUM_VUSES (vuses) == 1
           && rhs == SSA_NAME_VAR (VUSE_OP (vuses, 0)))
    return get_value (VUSE_OP (vuses, 0))->const_val;

  /* Unary operators.  Note that we know the single operand must
     be a constant.  So this should almost always return a
     simplified RHS.  */
  if (kind == '1')
    {
      /* Handle unary operators which can appear in GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);

      /* Simplify the operand down to a constant.  */
      if (TREE_CODE (op0) == SSA_NAME)
        {
          value *val = get_value (op0);
          if (val->lattice_val == CONSTANT)
            op0 = get_value (op0)->const_val;
        }

      retval = nondestructive_fold_unary_to_constant (code,
                                                      TREE_TYPE (rhs),
                                                      op0);

      /* If we folded, but did not create an invariant, then we can not
         use this expression.  */
      if (retval && ! is_gimple_min_invariant (retval))
        return NULL;

      /* If we could not fold the expression, but the arguments are all
         constants and gimple values, then build and return the new
         expression.

         In some cases the new expression is still something we can
         use as a replacement for an argument.  This happens with
         NOP conversions of types for example.

         In other cases the new expression can not be used as a
         replacement for an argument (as it would create non-gimple
         code).  But the new expression can still be used to derive
         other constants.  */
      if (! retval && is_gimple_min_invariant (op0))
        return build1 (code, TREE_TYPE (rhs), op0);
    }

  /* Binary and comparison operators.  We know one or both of the
     operands are constants.  */
  else if (kind == '2'
           || kind == '<'
           || code == TRUTH_AND_EXPR
           || code == TRUTH_OR_EXPR
           || code == TRUTH_XOR_EXPR)
    {
      /* Handle binary and comparison operators that can appear in
         GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);
      tree op1 = TREE_OPERAND (rhs, 1);

      /* Simplify the operands down to constants when appropriate.  */
      if (TREE_CODE (op0) == SSA_NAME)
        {
          value *val = get_value (op0);
          if (val->lattice_val == CONSTANT)
            op0 = val->const_val;
        }

      if (TREE_CODE (op1) == SSA_NAME)
        {
          value *val = get_value (op1);
          if (val->lattice_val == CONSTANT)
            op1 = val->const_val;
        }

      retval = nondestructive_fold_binary_to_constant (code,
                                                       TREE_TYPE (rhs),
                                                       op0, op1);

      /* If we folded, but did not create an invariant, then we can not
         use this expression.  */
      if (retval && ! is_gimple_min_invariant (retval))
        return NULL;

      /* If we could not fold the expression, but the arguments are all
         constants and gimple values, then build and return the new
         expression.

         In some cases the new expression is still something we can
         use as a replacement for an argument.  This happens with
         NOP conversions of types for example.

         In other cases the new expression can not be used as a
         replacement for an argument (as it would create non-gimple
         code).  But the new expression can still be used to derive
         other constants.  */
      if (! retval
          && is_gimple_min_invariant (op0)
          && is_gimple_min_invariant (op1))
        return build (code, TREE_TYPE (rhs), op0, op1);
    }

  /* We may be able to fold away calls to builtin functions if their
     arguments are constants.  */
  else if (code == CALL_EXPR
           && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR
           && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0))
               == FUNCTION_DECL)
           && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)))
    {
      use_optype uses = STMT_USE_OPS (stmt);
      if (NUM_USES (uses) != 0)
        {
          tree *orig;
          size_t i;

          /* Preserve the original values of every operand.  */
          orig = xmalloc (sizeof (tree) * NUM_USES (uses));
          for (i = 0; i < NUM_USES (uses); i++)
            orig[i] = USE_OP (uses, i);

          /* Substitute operands with their values and try to fold.  */
          replace_uses_in (stmt, NULL);
          retval = fold_builtin (rhs, false);

          /* Restore operands to their original form.  */
          for (i = 0; i < NUM_USES (uses); i++)
            SET_USE_OP (uses, i, orig[i]);

          free (orig);
        }
    }

  /* If we got a simplified form, see if we need to convert its type.  */
  if (retval)
    return fold_convert (TREE_TYPE (rhs), retval);

  /* No simplification was possible.  */
  return rhs;
}
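
/* Illustration: if x_1 has lattice value CONSTANT 4, the RHS of
   "y_2 = x_1 + 1" goes through the binary-operator arm above and
   nondestructive_fold_binary_to_constant reduces it to the invariant
   5, which evaluate_stmt below records as the value of y_2.  */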
/* Evaluate statement STMT.  */

static value
evaluate_stmt (tree stmt)
{
  value val;
  tree simplified;
  latticevalue likelyvalue = likely_value (stmt);

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    simplified = get_rhs (stmt);
  /* Otherwise the statement is likely to have an UNDEFINED value and
     there will be nothing to do.  */
  else
    simplified = NULL_TREE;

  if (simplified && is_gimple_min_invariant (simplified))
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.const_val = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
         had undefined or virtual operands, then the result of the
         statement should be undefined or virtual respectively.
         Else the result of the statement is VARYING.  */
      val.lattice_val = (likelyvalue == UNDEFINED ? UNDEFINED : VARYING);
      val.lattice_val = (likelyvalue == UNKNOWN_VAL
                         ? UNKNOWN_VAL : val.lattice_val);
      val.const_val = NULL_TREE;
    }

  return val;
}
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  */

static enum ssa_prop_result
visit_assignment (tree stmt, tree *output_p)
{
  value val;
  tree lhs, rhs;
  vuse_optype vuses;
  v_must_def_optype v_must_defs;

  lhs = TREE_OPERAND (stmt, 0);
  rhs = TREE_OPERAND (stmt, 1);
  vuses = STMT_VUSE_OPS (stmt);
  v_must_defs = STMT_V_MUST_DEF_OPS (stmt);

  gcc_assert (NUM_V_MAY_DEFS (STMT_V_MAY_DEF_OPS (stmt)) == 0);
  gcc_assert (NUM_V_MUST_DEFS (v_must_defs) == 1
              || TREE_CODE (lhs) == SSA_NAME);

  /* We require the SSA version number of the lhs for the value_vector.
     Make sure we have it.  */
  if (TREE_CODE (lhs) != SSA_NAME)
    {
      /* If we make it here, then stmt only has one definition:
         a V_MUST_DEF.  */
      lhs = V_MUST_DEF_OP (v_must_defs, 0);
    }

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      /* For a simple copy operation, we copy the lattice values.  */
      value *nval = get_value (rhs);
      val = *nval;
    }
  else if (DECL_P (rhs)
           && NUM_VUSES (vuses) == 1
           && rhs == SSA_NAME_VAR (VUSE_OP (vuses, 0)))
    {
      /* Same as above, but the rhs is not a gimple register and yet
         has a known VUSE.  */
      value *nval = get_value (VUSE_OP (vuses, 0));
      val = *nval;
    }
  else
    /* Evaluate the statement.  */
    val = evaluate_stmt (stmt);

  /* FIXME: Hack.  If this was a definition of a bitfield, we need to widen
     the constant value into the type of the destination variable.  This
     should not be necessary if GCC represented bitfields properly.  */
  {
    tree lhs = TREE_OPERAND (stmt, 0);
    if (val.lattice_val == CONSTANT
        && TREE_CODE (lhs) == COMPONENT_REF
        && DECL_BIT_FIELD (TREE_OPERAND (lhs, 1)))
      {
        tree w = widen_bitfield (val.const_val, TREE_OPERAND (lhs, 1), lhs);

        if (w && is_gimple_min_invariant (w))
          val.const_val = w;
        else
          {
            val.lattice_val = VARYING;
            val.const_val = NULL;
          }
      }
  }

  /* If LHS is not a gimple register, then it cannot take on an
     UNDEFINED value.  */
  if (!is_gimple_reg (SSA_NAME_VAR (lhs))
      && val.lattice_val == UNDEFINED)
    val.lattice_val = UNKNOWN_VAL;

  /* Set the lattice value of the statement's output.  */
  if (set_lattice_value (lhs, val))
    {
      *output_p = lhs;
      if (val.lattice_val == VARYING)
        return SSA_PROP_VARYING;
      else
        return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (tree stmt, edge *taken_edge_p)
{
  value val;
  basic_block block;

  block = bb_for_stmt (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = find_taken_edge (block, val.const_val);
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (tree stmt, edge *taken_edge_p, tree *output_p)
{
  stmt_ann_t ann;
  v_may_def_optype v_may_defs;
  v_must_def_optype v_must_defs;
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement: ");
      print_generic_stmt (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  ann = stmt_ann (stmt);

  v_must_defs = V_MUST_DEF_OPS (ann);
  v_may_defs = V_MAY_DEF_OPS (ann);
  if (TREE_CODE (stmt) == MODIFY_EXPR
      && NUM_V_MAY_DEFS (v_may_defs) == 0
      && (NUM_V_MUST_DEFS (v_must_defs) == 1
          || TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME))
    {
      /* If the statement is an assignment that produces a single
         output value, evaluate its RHS to see if the lattice value of
         its output has changed.  */
      return visit_assignment (stmt, output_p);
    }
  else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR)
    {
      /* If STMT is a conditional branch, see if we can determine
         which branch will be taken.  */
      return visit_cond_stmt (stmt, taken_edge_p);
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
    def_to_varying (def);

  /* Mark all V_MAY_DEF operands VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_VMAYDEF)
    def_to_varying (def);

  return SSA_PROP_VARYING;
}
/* Main entry point for SSA Conditional Constant Propagation.

   Every SSA name starts out at the default value computed by
   get_default_value.  The generic engine in tree-ssa-propagate.c then
   simulates only the statements reachable through executable edges,
   lowering each value monotonically through the lattice until a fixed
   point is reached.  Finally, substitute_and_fold rewrites the IL
   using the constants discovered.  */

static void
execute_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  ccp_finalize ();
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}
struct tree_opt_pass pass_ccp =
{
  "ccp",				/* name */
  gate_ccp,				/* gate */
  execute_ssa_ccp,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_rename_vars
    | TODO_ggc_collect | TODO_verify_ssa
    | TODO_verify_stmts,		/* todo_flags_finish */
  0					/* letter */
};
/* Given a constant value VAL for bitfield FIELD, and a destination
   variable VAR, return VAL appropriately widened to fit into VAR.  If
   FIELD is wider than HOST_WIDE_INT, NULL is returned.  */

tree
widen_bitfield (tree val, tree field, tree var)
{
  unsigned HOST_WIDE_INT var_size, field_size;
  tree wide_val;
  unsigned HOST_WIDE_INT mask;
  unsigned int i;

  /* We can only do this if the size of the type and field and VAL are
     all constants representable in HOST_WIDE_INT.  */
  if (!host_integerp (TYPE_SIZE (TREE_TYPE (var)), 1)
      || !host_integerp (DECL_SIZE (field), 1)
      || !host_integerp (val, 0))
    return NULL_TREE;

  var_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1);
  field_size = tree_low_cst (DECL_SIZE (field), 1);

  /* Give up if either the bitfield or the variable are too wide.  */
  if (field_size > HOST_BITS_PER_WIDE_INT || var_size > HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;

  gcc_assert (var_size >= field_size);

  /* If the sign bit of the value is not set or the field's type is unsigned,
     just mask off the high order bits of the value.  */
  if (DECL_UNSIGNED (field)
      || !(tree_low_cst (val, 0) & (((HOST_WIDE_INT)1) << (field_size - 1))))
    {
      /* Zero extension.  Build a mask with the lower 'field_size' bits
         set and a BIT_AND_EXPR node to clear the high order bits of
         the value.  */
      for (i = 0, mask = 0; i < field_size; i++)
        mask |= ((HOST_WIDE_INT) 1) << i;

      wide_val = build (BIT_AND_EXPR, TREE_TYPE (var), val,
                        fold_convert (TREE_TYPE (var),
                                      build_int_cst (NULL_TREE, mask)));
    }
  else
    {
      /* Sign extension.  Create a mask with the upper 'field_size'
         bits set and a BIT_IOR_EXPR to set the high order bits of the
         value.  */
      for (i = 0, mask = 0; i < (var_size - field_size); i++)
        mask |= ((HOST_WIDE_INT) 1) << (var_size - i - 1);

      wide_val = build (BIT_IOR_EXPR, TREE_TYPE (var), val,
                        fold_convert (TREE_TYPE (var),
                                      build_int_cst (NULL_TREE, mask)));
    }

  return fold (wide_val);
}
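
/* Worked example (illustrative, assuming an 8-bit variable and a
   signed 4-bit field): storing the constant 10 (binary 1010) has the
   field's sign bit set, so the sign-extension branch builds the mask
   11110000 and returns 10 | 0xf0 = 0xfa, which reads back as -6.
   For an unsigned field the zero-extension branch would instead
   return 10 & 00001111 = 10.  */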
/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type)
{
  tree min_idx, idx, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of elements type) from that ARRAY_REF).
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
          || TREE_CODE (elt_offset) != INTEGER_CST)
        return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!lang_hooks.types_compatible_p (orig_type, elt_type))
    return NULL_TREE;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
        elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = integer_zero_node;
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;

      if (TREE_CODE (elt_size) != INTEGER_CST
          || div_and_round_double (TRUNC_DIV_EXPR, 1,
                                   TREE_INT_CST_LOW (offset),
                                   TREE_INT_CST_HIGH (offset),
                                   TREE_INT_CST_LOW (elt_size),
                                   TREE_INT_CST_HIGH (elt_size),
                                   &lquo, &hquo, &lrem, &hrem)
          || lrem || hrem)
        return NULL_TREE;

      idx = build_int_cst_wide (NULL_TREE, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = integer_zero_node;
  if (TYPE_DOMAIN (array_type))
    {
      if (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
        min_idx = TYPE_MIN_VALUE (TYPE_DOMAIN (array_type));
      else
        min_idx = fold_convert (TYPE_DOMAIN (array_type), min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
        return NULL_TREE;

      idx = fold_convert (TYPE_DOMAIN (array_type), idx);
      elt_offset = fold_convert (TYPE_DOMAIN (array_type), elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  return build (ARRAY_REF, orig_type, base, idx, min_idx,
                size_int (tree_low_cst (elt_size, 1)
                          / (TYPE_ALIGN_UNIT (elt_type))));
}
/* A subroutine of fold_stmt_r.  Attempts to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
/* ??? This doesn't handle class inheritance.  */

static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
                                    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (lang_hooks.types_compatible_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
        continue;
      if (DECL_BIT_FIELD (f))
        continue;
      if (TREE_CODE (DECL_FIELD_OFFSET (f)) != INTEGER_CST)
        continue;

      /* ??? Java creates "interesting" fields for representing base classes.
         They have no name, and have no context.  With no context, we get into
         trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
        continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (DECL_FIELD_OFFSET (f), offset);
      if (cmp > 0)
        continue;

      field_type = TREE_TYPE (f);
      if (cmp < 0)
        {
          /* Don't care about offsets into the middle of scalars.  */
          if (!AGGREGATE_TYPE_P (field_type))
            continue;

          /* Check for array at the end of the struct.  This is often
             used as for flexible array members.  We should be able to
             turn this into an array access anyway.  */
          if (TREE_CODE (field_type) == ARRAY_TYPE)
            tail_array_field = f;

          /* Check the end of the field against the offset.  */
          if (!DECL_SIZE_UNIT (f)
              || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
            continue;
          t = int_const_binop (MINUS_EXPR, offset, DECL_FIELD_OFFSET (f), 1);
          if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
            continue;

          /* If we matched, then set offset to the displacement into
             this field.  */
          offset = t;
          goto found;
        }

      /* Here we exactly match the offset being checked.  If the types match,
         then we can return that field.  */
      else if (lang_hooks.types_compatible_p (orig_type, field_type))
        {
          if (base_is_ptr)
            base = build1 (INDIRECT_REF, record_type, base);
          t = build (COMPONENT_REF, field_type, base, f, NULL_TREE);
          return t;
        }

      /* Don't care about type-punning of scalars.  */
      else if (!AGGREGATE_TYPE_P (field_type))
        return NULL_TREE;

      goto found;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, DECL_FIELD_OFFSET (f), 1);

 found:
  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type);
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
                                             orig_type, false);
}
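
/* Illustration (assuming 4-byte int and no padding): given

	struct S { int a; int b; } s;

   a load *(int *)((char *)&s + 4) matches field 'b' exactly and is
   folded into the reference s.b.  */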
/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a PLUS_EXPR.  */
  if (TREE_CODE (base) == PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = int_const_binop (PLUS_EXPR, offset, offset2, 1);
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
          && is_gimple_min_invariant (DECL_INITIAL (base)))
        return DECL_INITIAL (base);

      /* Try folding *(&B+O) to B[X].  */
      t = maybe_fold_offset_to_array_ref (base, offset, TREE_TYPE (expr));
      if (t)
        return t;

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_component_ref (TREE_TYPE (base), base, offset,
                                              TREE_TYPE (expr), false);
      if (t)
        return t;

      /* Fold *&B to B.  We can only do this if EXPR is the same type
         as BASE.  We can't do this if EXPR is the element type of an array
         and BASE is the array.  */
      if (integer_zerop (offset)
          && lang_hooks.types_compatible_p (TREE_TYPE (base),
                                            TREE_TYPE (expr)))
        return base;
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
         such as "_"[3].  Bail out of the entire substitution search
         and arrange for the entire statement to be replaced by a
         call to __builtin_trap.  In all likelihood this will all be
         constant-folded away, but in the meantime we can't leave with
         something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
        {
          /* FIXME: Except that this causes problems elsewhere with dead
             code not being deleted, and we abort in the rtl expanders
             because we failed to remove some ssa_name.  In the meantime,
             just return zero.  */
          /* FIXME2: This condition should be signaled by
             fold_read_from_constant_string directly, rather than
             re-checking for it here.  */
          return integer_zero_node;
        }

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
        {
          t = maybe_fold_offset_to_component_ref (TREE_TYPE (TREE_TYPE (base)),
                                                  base, offset,
                                                  TREE_TYPE (expr), true);
          if (t)
            return t;
        }
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}
/* A subroutine of fold_stmt_r.  EXPR is a PLUS_EXPR.

   A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where the cast doesn't actually exist, but is implicit in the
   type of the PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */

static tree
maybe_fold_stmt_addition (tree expr)
{
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  tree ptr_type = TREE_TYPE (expr);
  tree ptd_type;
  tree t;
  bool subtract = (TREE_CODE (expr) == MINUS_EXPR);

  /* We're only interested in pointer arithmetic.  */
  if (!POINTER_TYPE_P (ptr_type))
    return NULL_TREE;
  /* Canonicalize the integral operand to op1.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (op0)))
    {
      if (subtract)
        return NULL_TREE;
      t = op0, op0 = op1, op1 = t;
    }
  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    return NULL_TREE;
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
        break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
        break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
        {
          min_idx = TYPE_MIN_VALUE (min_idx);
          if (min_idx)
            {
              if (TREE_CODE (min_idx) != INTEGER_CST)
                break;

              array_idx = convert (TREE_TYPE (min_idx), array_idx);
              if (!integer_zerop (min_idx))
                array_idx = int_const_binop (MINUS_EXPR, array_idx,
                                             min_idx, 0);
            }
        }

      /* Convert the index to a byte offset.  */
      array_idx = convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      /* If we're manipulating unsigned types, then folding into negative
         values can produce incorrect results.  Particularly if the type
         is smaller than the width of the pointer.  */
      if (subtract
          && TYPE_UNSIGNED (TREE_TYPE (op1))
          && tree_int_cst_lt (array_idx, op1))
        break;
      op1 = int_const_binop (subtract ? MINUS_EXPR : PLUS_EXPR,
                             array_idx, op1, 0);
      subtract = false;
      op0 = array_obj;
    }

  /* If we weren't able to fold the subtraction into another array reference,
     canonicalize the integer for passing to the array and component ref
     simplification functions.  */
  if (subtract)
    {
      if (TYPE_UNSIGNED (TREE_TYPE (op1)))
        return NULL;
      op1 = fold (build1 (NEGATE_EXPR, TREE_TYPE (op1), op1));
      /* ??? In theory fold should always produce another integer.  */
      if (TREE_CODE (op1) != INTEGER_CST)
        return NULL;
    }

  ptd_type = TREE_TYPE (ptr_type);

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
                                            ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, ptr_type, t);

  return t;
}
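
/* Illustration (assuming 4-byte int): for "int a[10]", the address
   computation (int *)&a + 8 carries the conversion only in the type
   of the PLUS_EXPR.  The 8-byte displacement divides exactly by the
   element size, so the expression is rewritten as &a[2].  */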
/* Subroutine of fold_stmt called via walk_tree.  We perform several
   simplifications of EXPR_P, mostly having to do with pointer arithmetic.  */

static tree
fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
{
  bool *changed_p = data;
  tree expr = *expr_p, t;

  /* ??? It'd be nice if walk_tree had a pre-order option.  */
  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
                                    integer_zero_node);
      break;

      /* ??? Could handle ARRAY_REF here, as a variant of INDIRECT_REF.
         We'd only want to bother decomposing an existing ARRAY_REF if
         the base array is found to have another offset contained within.
         Otherwise we'd be wasting time.  */

    case ADDR_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Set TREE_INVARIANT properly so that the value is properly
         considered constant, and so gets propagated as expected.  */
      if (*changed_p)
        recompute_tree_invarant_for_addr_expr (expr);
      return NULL_TREE;

    case PLUS_EXPR:
    case MINUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (expr);
      break;

    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
         We've already checked that the records are compatible, so we should
         come up with a set of compatible fields.  */
      {
        tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
        tree expr_field = TREE_OPERAND (expr, 1);

        if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
          {
            expr_field = find_compatible_field (expr_record, expr_field);
            TREE_OPERAND (expr, 1) = expr_field;
          }
      }
      t = NULL_TREE;
      break;

    default:
      return NULL_TREE;
    }

  if (t)
    {
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}
/* Return the string length of ARG in LENGTH.  If ARG is an SSA name variable,
   follow its use-def chains.  If LENGTH is not NULL and its value is not
   equal to the length we determine, or if we are unable to determine the
   length, return false.  VISITED is a bitmap of visited variables.  */

static bool
get_strlen (tree arg, tree *length, bitmap visited)
{
  tree var, def_stmt, val;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length && simple_cst_equal (val, *length) != 1)
        return false;

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (TREE_CODE (def_stmt))
    {
      case MODIFY_EXPR:
        {
          tree len, rhs;

          /* The RHS of the statement defining VAR must either have a
             constant length or come from another SSA_NAME with a constant
             length.  */
          rhs = TREE_OPERAND (def_stmt, 1);
          STRIP_NOPS (rhs);
          if (TREE_CODE (rhs) == SSA_NAME)
            return get_strlen (rhs, length, visited);

          /* See if the RHS is a constant length.  */
          len = c_strlen (rhs, 1);
          if (len)
            {
              if (*length && simple_cst_equal (len, *length) != 1)
                return false;

              *length = len;
              return true;
            }

          break;
        }

      case PHI_NODE:
        {
          /* All the arguments of the PHI node must have the same constant
             length.  */
          int i;

          for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
            {
              tree arg = PHI_ARG_DEF (def_stmt, i);

              /* If this PHI has itself as an argument, we cannot
                 determine the string length of this argument.  However,
                 if we can find a constant string length for the other
                 PHI args then we can still be sure that this is a
                 constant string length.  So be optimistic and just
                 continue with the next argument.  */
              if (arg == PHI_RESULT (def_stmt))
                continue;

              if (!get_strlen (arg, length, visited))
                return false;
            }

          return true;
        }

      default:
        break;
    }

  return false;
}
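
/* Illustration: given

	s_1 = "abcd";
	s_2 = PHI <s_1(2), s_3(3)>

   where s_3 is also known to hold a string of length 4, walking the
   use-def chains from a use of s_2 finds c_strlen equal to 4 for
   every argument, so a call strlen (s_2) can be folded to 4 by
   ccp_fold_builtin below.  */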
/* Fold builtin call FN in statement STMT.  If it cannot be folded into a
   constant, return NULL_TREE.  Otherwise, return its constant value.  */

static tree
ccp_fold_builtin (tree stmt, tree fn)
{
  tree result, strlen_val[2];
  tree callee, arglist, a;
  int strlen_arg, i;
  bitmap visited;
  bool ignore;

  ignore = TREE_CODE (stmt) != MODIFY_EXPR;

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_builtin (fn, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = get_callee_fndecl (fn);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  arglist = TREE_OPERAND (fn, 1);
  if (!arglist)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      strlen_arg = 1;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      strlen_arg = 2;
      break;
    default:
      return NULL_TREE;
    }

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_XMALLOC ();

  memset (strlen_val, 0, sizeof (strlen_val));
  for (i = 0, a = arglist;
       strlen_arg;
       i++, strlen_arg >>= 1, a = TREE_CHAIN (a))
    if (strlen_arg & 1)
      {
        bitmap_clear (visited);
        if (!get_strlen (TREE_VALUE (a), &strlen_val[i], visited))
          strlen_val[i] = NULL_TREE;
      }

  BITMAP_XFREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (strlen_val[0])
        {
          tree new = fold_convert (TREE_TYPE (fn), strlen_val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we can not use the result.  */
          if (is_gimple_val (new)
              || (is_gimple_cast (new)
                  && is_gimple_val (TREE_OPERAND (new, 0))))
            return new;
        }
      break;

    case BUILT_IN_STRCPY:
      if (strlen_val[1] && is_gimple_val (strlen_val[1]))
        result = fold_builtin_strcpy (fn, strlen_val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (strlen_val[1] && is_gimple_val (strlen_val[1]))
        result = fold_builtin_strncpy (fn, strlen_val[1]);
      break;

    case BUILT_IN_FPUTS:
      result = fold_builtin_fputs (arglist,
                                   TREE_CODE (stmt) != MODIFY_EXPR, 0,
                                   strlen_val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      result = fold_builtin_fputs (arglist,
                                   TREE_CODE (stmt) != MODIFY_EXPR, 1,
                                   strlen_val[0]);
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
/* Fold the statement pointed by STMT_P.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */

bool
fold_stmt (tree *stmt_p)
{
  tree rhs, result, stmt;
  bool changed = false;

  stmt = *stmt_p;

  /* If we replaced constants and the statement makes pointer dereferences,
     then we may need to fold instances of *&VAR into VAR, etc.  */
  if (walk_tree (stmt_p, fold_stmt_r, &changed, NULL))
    {
      *stmt_p
        = build_function_call_expr (implicit_built_in_decls[BUILT_IN_TRAP],
                                    NULL);
      return true;
    }

  rhs = get_rhs (stmt);
  if (!rhs)
    return changed;
  result = NULL_TREE;

  if (TREE_CODE (rhs) == CALL_EXPR)
    {
      tree callee;

      /* Check for builtins that CCP can handle using information not
         available in the generic fold routines.  */
      callee = get_callee_fndecl (rhs);
      if (callee && DECL_BUILT_IN (callee))
        result = ccp_fold_builtin (stmt, rhs);
      else
        {
          /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
             here are when we've propagated the address of a decl into the
             object slot.  */
          /* ??? Should perhaps do this in fold proper.  However, doing it
             there requires that we create a new CALL_EXPR, and that requires
             copying EH region info to the new node.  Easier to just do it
             here where we can just smash the call operand.  */
          callee = TREE_OPERAND (rhs, 0);
          if (TREE_CODE (callee) == OBJ_TYPE_REF
              && lang_hooks.fold_obj_type_ref
              && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
              && DECL_P (TREE_OPERAND (OBJ_TYPE_REF_OBJECT (callee), 0)))
            {
              tree t;

              /* ??? Caution: Broken ADDR_EXPR semantics means that
                 looking at the type of the operand of the addr_expr
                 can yield an array type.  See silly exception in
                 check_pointer_types_r.  */

              t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
              t = lang_hooks.fold_obj_type_ref (callee, t);
              if (t)
                {
                  TREE_OPERAND (rhs, 0) = t;
                  changed = true;
                }
            }
        }
    }

  /* If we couldn't fold the RHS, hand over to the generic fold routines.  */
  if (result == NULL_TREE)
    result = fold (rhs);

  /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR that
     may have been added by fold, and "useless" type conversions that might
     now be apparent due to propagation.  */
  STRIP_USELESS_TYPE_CONVERSION (result);

  if (result != rhs)
    changed |= set_rhs (stmt_p, result);

  return changed;
}
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static void
execute_fold_all_builtins (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          tree *stmtp = bsi_stmt_ptr (i);
          tree call = get_rhs (*stmtp);
          tree callee, result;

          if (!call || TREE_CODE (call) != CALL_EXPR)
            continue;
          callee = get_callee_fndecl (call);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            continue;

          result = ccp_fold_builtin (*stmtp, call);
          if (!result)
            switch (DECL_FUNCTION_CODE (callee))
              {
              case BUILT_IN_CONSTANT_P:
                /* Resolve __builtin_constant_p.  If it hasn't been
                   folded to integer_one_node by now, it's fairly
                   certain that the value simply isn't constant.  */
                result = integer_zero_node;
                break;

              default:
                continue;
              }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_generic_stmt (dump_file, *stmtp, dump_flags);
            }

          if (set_rhs (stmtp, result))
            modify_stmt (*stmtp);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_generic_stmt (dump_file, *stmtp, dump_flags);
              fprintf (dump_file, "\n");
            }
        }
    }
}
struct tree_opt_pass pass_fold_builtins =
{
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa,	/* todo_flags_finish */
  0					/* letter */
};