/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000-2015 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED  ->  the initial state of the value.  This value
			   is replaced with a correct initial value
			   the first time the value is used, so the
			   rest of the pass does not need to care about
			   it.  Using this value simplifies initialization
			   of the pass, and prevents us from needlessly
			   scanning statements that are never reached.

	UNDEFINED      ->  V_i is a local variable whose definition
			   has not been processed yet.  Therefore we
			   don't yet know if its value is a constant
			   or not.

	CONSTANT       ->  V_i has been found to hold a constant
			   value C.

	VARYING        ->  V_i cannot take a constant value, or if it
			   does, it is not possible to determine it
			   at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.


   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.


   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   This algorithm uses wide-ints at the max precision of the target.
   This means that, with one uninteresting exception, variables with
   UNSIGNED types never go to VARYING because the bits above the
   precision of the type of the variable are always zero.  The
   uninteresting case is a variable of UNSIGNED type that has the
   maximum precision of the target.  Such variables can go to VARYING,
   but this causes no loss of information since these variables will
   never be extended.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
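
/* As a small end-to-end illustration (added for exposition, not part of
   the algorithm description above), consider:

	int f (void)
	{
	  int x = 4;
	  int y;
	  if (x > 3)
	    y = x + 1;
	  else
	    y = 0;
	  return y;
	}

   CCP propagates x_1 = 4 into the predicate, folds x_1 > 3 to true,
   marks the else edge not executable, and the PHI node for y then
   meets only the single executable argument, so the function returns
   the constant 5.  */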
#include "coretypes.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "diagnostic-core.h"
#include "wide-int-print.h"
#include "builtins.h"
#include "tree-chkp.h"

/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

struct ccp_prop_value_t {
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For X
       with a CONSTANT lattice value X & ~mask == value & ~mask.  The
       zero bits in the mask cover constant values.  The ones mean no
       information.  */
    widest_int mask;
};
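
/* Illustrative example of the VALUE/MASK encoding: lattice_val ==
   CONSTANT with value == 0x10 and mask == 0xf means bit 4 is known to
   be one, all higher bits are known to be zero, and the low four bits
   are unknown; every X with X & ~0xf == 0x10 matches this entry.  */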

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static ccp_prop_value_t *const_val;
static unsigned n_const_val;

static void canonicalize_value (ccp_prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);
static void ccp_lattice_meet (ccp_prop_value_t *, ccp_prop_value_t *);

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, ccp_prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      if (TREE_CODE (val.value) != INTEGER_CST
	  || val.mask == 0)
	{
	  fprintf (outf, "%sCONSTANT ", prefix);
	  print_generic_expr (outf, val.value, dump_flags);
	}
      else
	{
	  widest_int cval = wi::bit_and_not (wi::to_widest (val.value),
					     val.mask);
	  fprintf (outf, "%sCONSTANT ", prefix);
	  print_hex (cval, outf);
	  fprintf (outf, " (");
	  print_hex (val.mask, outf);
	  fprintf (outf, ")");
	}
      break;
    default:
      gcc_unreachable ();
    }
}

/* Print lattice value VAL to stderr.  */

void debug_lattice_value (ccp_prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (ccp_prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}

/* Extend NONZERO_BITS to a full mask, with the upper bits being set.  */

static widest_int
extend_mask (const wide_int &nonzero_bits)
{
  return (wi::mask <widest_int> (wi::get_precision (nonzero_bits), true)
	  | widest_int::from (nonzero_bits, UNSIGNED));
}
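
/* For example (purely illustrative): if NONZERO_BITS is the 32-bit
   value 0xff, the returned widest_int has the low eight bits set
   (those bits of the variable are unknown), bits 8 to 31 clear (known
   to be zero), and every bit from 32 upward set, matching the "upper
   bits being set" convention described above.  */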

/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static ccp_prop_value_t
get_default_value (tree var)
{
  ccp_prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (!virtual_operand_p (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
	val.lattice_val = UNDEFINED;
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = -1;
	  if (flag_tree_bit_ccp)
	    {
	      wide_int nonzero_bits = get_nonzero_bits (var);
	      if (nonzero_bits != -1)
		{
		  val.lattice_val = CONSTANT;
		  val.value = build_zero_cst (TREE_TYPE (var));
		  val.mask = extend_mask (nonzero_bits);
		}
	    }
	}
    }
  else if (is_gimple_assign (stmt))
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	{
	  /* Any other variable defined by an assignment is considered
	     UNDEFINED.  */
	  val.lattice_val = UNDEFINED;
	}
    }
  else if ((is_gimple_call (stmt)
	    && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      /* A variable defined by a call or a PHI node is considered
	 UNDEFINED.  */
      val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = -1;
    }

  return val;
}

/* Get the constant value associated with variable VAR.  */

static inline ccp_prop_value_t *
get_value (tree var)
{
  ccp_prop_value_t *val;

  if (const_val == NULL
      || SSA_NAME_VERSION (var) >= n_const_val)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_value (val);

  return val;
}

/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  ccp_prop_value_t *val;
  if (TREE_CODE (var) != SSA_NAME)
    {
      if (is_gimple_min_invariant (var))
	return var;
      return NULL_TREE;
    }
  val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
	  || val->mask == 0))
    return val->value;
  return NULL_TREE;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  ccp_prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = -1;
}

/* For integer constants, make sure to drop TREE_OVERFLOW.  */

static void
canonicalize_value (ccp_prop_value_t *val)
{
  if (val->lattice_val != CONSTANT)
    return;

  if (TREE_OVERFLOW_P (val->value))
    val->value = drop_tree_overflow (val->value);
}
417 /* Return whether the lattice transition is valid. */
420 valid_lattice_transition (ccp_prop_value_t old_val
, ccp_prop_value_t new_val
)
422 /* Lattice transitions must always be monotonically increasing in
424 if (old_val
.lattice_val
< new_val
.lattice_val
)
427 if (old_val
.lattice_val
!= new_val
.lattice_val
)
430 if (!old_val
.value
&& !new_val
.value
)
433 /* Now both lattice values are CONSTANT. */
435 /* Allow arbitrary copy changes as we might look through PHI <a_1, ...>
436 when only a single copy edge is executable. */
437 if (TREE_CODE (old_val
.value
) == SSA_NAME
438 && TREE_CODE (new_val
.value
) == SSA_NAME
)
441 /* Allow transitioning from a constant to a copy. */
442 if (is_gimple_min_invariant (old_val
.value
)
443 && TREE_CODE (new_val
.value
) == SSA_NAME
)
446 /* Allow transitioning from PHI <&x, not executable> == &x
447 to PHI <&x, &y> == common alignment. */
448 if (TREE_CODE (old_val
.value
) != INTEGER_CST
449 && TREE_CODE (new_val
.value
) == INTEGER_CST
)
452 /* Bit-lattices have to agree in the still valid bits. */
453 if (TREE_CODE (old_val
.value
) == INTEGER_CST
454 && TREE_CODE (new_val
.value
) == INTEGER_CST
)
455 return (wi::bit_and_not (wi::to_widest (old_val
.value
), new_val
.mask
)
456 == wi::bit_and_not (wi::to_widest (new_val
.value
), new_val
.mask
));
458 /* Otherwise constant values have to agree. */
459 if (operand_equal_p (old_val
.value
, new_val
.value
, 0))
462 /* At least the kinds and types should agree now. */
463 if (TREE_CODE (old_val
.value
) != TREE_CODE (new_val
.value
)
464 || !types_compatible_p (TREE_TYPE (old_val
.value
),
465 TREE_TYPE (new_val
.value
)))
468 /* For floats and !HONOR_NANS allow transitions from (partial) NaN
470 tree type
= TREE_TYPE (new_val
.value
);
471 if (SCALAR_FLOAT_TYPE_P (type
)
472 && !HONOR_NANS (type
))
474 if (REAL_VALUE_ISNAN (TREE_REAL_CST (old_val
.value
)))
477 else if (VECTOR_FLOAT_TYPE_P (type
)
478 && !HONOR_NANS (type
))
480 for (unsigned i
= 0; i
< VECTOR_CST_NELTS (old_val
.value
); ++i
)
481 if (!REAL_VALUE_ISNAN
482 (TREE_REAL_CST (VECTOR_CST_ELT (old_val
.value
, i
)))
483 && !operand_equal_p (VECTOR_CST_ELT (old_val
.value
, i
),
484 VECTOR_CST_ELT (new_val
.value
, i
), 0))
488 else if (COMPLEX_FLOAT_TYPE_P (type
)
489 && !HONOR_NANS (type
))
491 if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_REALPART (old_val
.value
)))
492 && !operand_equal_p (TREE_REALPART (old_val
.value
),
493 TREE_REALPART (new_val
.value
), 0))
495 if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_IMAGPART (old_val
.value
)))
496 && !operand_equal_p (TREE_IMAGPART (old_val
.value
),
497 TREE_IMAGPART (new_val
.value
), 0))
504 /* Set the value for variable VAR to NEW_VAL. Return true if the new
505 value is different from VAR's previous value. */
508 set_lattice_value (tree var
, ccp_prop_value_t
*new_val
)
510 /* We can deal with old UNINITIALIZED values just fine here. */
511 ccp_prop_value_t
*old_val
= &const_val
[SSA_NAME_VERSION (var
)];
513 canonicalize_value (new_val
);
515 /* We have to be careful to not go up the bitwise lattice
516 represented by the mask. Instead of dropping to VARYING
517 use the meet operator to retain a conservative value.
518 Missed optimizations like PR65851 makes this necessary.
519 It also ensures we converge to a stable lattice solution. */
520 if (new_val
->lattice_val
== CONSTANT
521 && old_val
->lattice_val
== CONSTANT
522 && TREE_CODE (new_val
->value
) != SSA_NAME
)
523 ccp_lattice_meet (new_val
, old_val
);
525 gcc_checking_assert (valid_lattice_transition (*old_val
, *new_val
));
527 /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
528 caller that this was a non-transition. */
529 if (old_val
->lattice_val
!= new_val
->lattice_val
530 || (new_val
->lattice_val
== CONSTANT
531 && (TREE_CODE (new_val
->value
) != TREE_CODE (old_val
->value
)
532 || (TREE_CODE (new_val
->value
) == INTEGER_CST
533 && (new_val
->mask
!= old_val
->mask
534 || (wi::bit_and_not (wi::to_widest (old_val
->value
),
536 != wi::bit_and_not (wi::to_widest (new_val
->value
),
538 || (TREE_CODE (new_val
->value
) != INTEGER_CST
539 && !operand_equal_p (new_val
->value
, old_val
->value
, 0)))))
541 /* ??? We would like to delay creation of INTEGER_CSTs from
542 partially constants here. */
544 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
546 dump_lattice_value (dump_file
, "Lattice value changed to ", *new_val
);
547 fprintf (dump_file
, ". Adding SSA edges to worklist.\n");
552 gcc_assert (new_val
->lattice_val
!= UNINITIALIZED
);
559 static ccp_prop_value_t
get_value_for_expr (tree
, bool);
560 static ccp_prop_value_t
bit_value_binop (enum tree_code
, tree
, tree
, tree
);
561 static void bit_value_binop_1 (enum tree_code
, tree
, widest_int
*, widest_int
*,
562 tree
, const widest_int
&, const widest_int
&,
563 tree
, const widest_int
&, const widest_int
&);
565 /* Return a widest_int that can be used for bitwise simplifications
569 value_to_wide_int (ccp_prop_value_t val
)
572 && TREE_CODE (val
.value
) == INTEGER_CST
)
573 return wi::to_widest (val
.value
);
578 /* Return the value for the address expression EXPR based on alignment
581 static ccp_prop_value_t
582 get_value_from_alignment (tree expr
)
584 tree type
= TREE_TYPE (expr
);
585 ccp_prop_value_t val
;
586 unsigned HOST_WIDE_INT bitpos
;
589 gcc_assert (TREE_CODE (expr
) == ADDR_EXPR
);
591 get_pointer_alignment_1 (expr
, &align
, &bitpos
);
592 val
.mask
= (POINTER_TYPE_P (type
) || TYPE_UNSIGNED (type
)
593 ? wi::mask
<widest_int
> (TYPE_PRECISION (type
), false)
594 : -1).and_not (align
/ BITS_PER_UNIT
- 1);
596 = wi::sext (val
.mask
, TYPE_PRECISION (type
)) == -1 ? VARYING
: CONSTANT
;
597 if (val
.lattice_val
== CONSTANT
)
598 val
.value
= build_int_cstu (type
, bitpos
/ BITS_PER_UNIT
);
600 val
.value
= NULL_TREE
;
605 /* Return the value for the tree operand EXPR. If FOR_BITS_P is true
606 return constant bits extracted from alignment information for
607 invariant addresses. */
609 static ccp_prop_value_t
610 get_value_for_expr (tree expr
, bool for_bits_p
)
612 ccp_prop_value_t val
;
614 if (TREE_CODE (expr
) == SSA_NAME
)
616 val
= *get_value (expr
);
618 && val
.lattice_val
== CONSTANT
619 && TREE_CODE (val
.value
) == ADDR_EXPR
)
620 val
= get_value_from_alignment (val
.value
);
621 /* Fall back to a copy value. */
623 && val
.lattice_val
== VARYING
624 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (expr
))
626 val
.lattice_val
= CONSTANT
;
631 else if (is_gimple_min_invariant (expr
)
632 && (!for_bits_p
|| TREE_CODE (expr
) != ADDR_EXPR
))
634 val
.lattice_val
= CONSTANT
;
637 canonicalize_value (&val
);
639 else if (TREE_CODE (expr
) == ADDR_EXPR
)
640 val
= get_value_from_alignment (expr
);
643 val
.lattice_val
= VARYING
;
645 val
.value
= NULL_TREE
;
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */
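
/* Rough illustration (not exhaustive): for z_4 = x_2 + y_3 with x_2
   CONSTANT and y_3 VARYING, likely_value returns CONSTANT because a
   constant operand is present; if both operands were UNDEFINED it
   would return UNDEFINED instead.  */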
662 likely_value (gimple stmt
)
664 bool has_constant_operand
, has_undefined_operand
, all_undefined_operands
;
665 bool has_nsa_operand
;
670 enum gimple_code code
= gimple_code (stmt
);
672 /* This function appears to be called only for assignments, calls,
673 conditionals, and switches, due to the logic in visit_stmt. */
674 gcc_assert (code
== GIMPLE_ASSIGN
675 || code
== GIMPLE_CALL
676 || code
== GIMPLE_COND
677 || code
== GIMPLE_SWITCH
);
679 /* If the statement has volatile operands, it won't fold to a
681 if (gimple_has_volatile_ops (stmt
))
684 /* Arrive here for more complex cases. */
685 has_constant_operand
= false;
686 has_undefined_operand
= false;
687 all_undefined_operands
= true;
688 has_nsa_operand
= false;
689 FOR_EACH_SSA_TREE_OPERAND (use
, stmt
, iter
, SSA_OP_USE
)
691 ccp_prop_value_t
*val
= get_value (use
);
693 if (val
->lattice_val
== UNDEFINED
)
694 has_undefined_operand
= true;
696 all_undefined_operands
= false;
698 if (val
->lattice_val
== CONSTANT
)
699 has_constant_operand
= true;
701 if (SSA_NAME_IS_DEFAULT_DEF (use
)
702 || !prop_simulate_again_p (SSA_NAME_DEF_STMT (use
)))
703 has_nsa_operand
= true;
706 /* There may be constants in regular rhs operands. For calls we
707 have to ignore lhs, fndecl and static chain, otherwise only
709 for (i
= (is_gimple_call (stmt
) ? 2 : 0) + gimple_has_lhs (stmt
);
710 i
< gimple_num_ops (stmt
); ++i
)
712 tree op
= gimple_op (stmt
, i
);
713 if (!op
|| TREE_CODE (op
) == SSA_NAME
)
715 if (is_gimple_min_invariant (op
))
716 has_constant_operand
= true;
719 if (has_constant_operand
)
720 all_undefined_operands
= false;
722 if (has_undefined_operand
723 && code
== GIMPLE_CALL
724 && gimple_call_internal_p (stmt
))
725 switch (gimple_call_internal_fn (stmt
))
727 /* These 3 builtins use the first argument just as a magic
728 way how to find out a decl uid. */
729 case IFN_GOMP_SIMD_LANE
:
730 case IFN_GOMP_SIMD_VF
:
731 case IFN_GOMP_SIMD_LAST_LANE
:
732 has_undefined_operand
= false;
738 /* If the operation combines operands like COMPLEX_EXPR make sure to
739 not mark the result UNDEFINED if only one part of the result is
741 if (has_undefined_operand
&& all_undefined_operands
)
743 else if (code
== GIMPLE_ASSIGN
&& has_undefined_operand
)
745 switch (gimple_assign_rhs_code (stmt
))
747 /* Unary operators are handled with all_undefined_operands. */
750 case POINTER_PLUS_EXPR
:
751 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
752 Not bitwise operators, one VARYING operand may specify the
753 result completely. Not logical operators for the same reason.
754 Not COMPLEX_EXPR as one VARYING operand makes the result partly
755 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
756 the undefined operand may be promoted. */
760 /* If any part of an address is UNDEFINED, like the index
761 of an ARRAY_EXPR, then treat the result as UNDEFINED. */
768 /* If there was an UNDEFINED operand but the result may be not UNDEFINED
769 fall back to CONSTANT. During iteration UNDEFINED may still drop
771 if (has_undefined_operand
)
774 /* We do not consider virtual operands here -- load from read-only
775 memory may have only VARYING virtual operands, but still be
776 constant. Also we can combine the stmt with definitions from
777 operands whose definitions are not simulated again. */
778 if (has_constant_operand
780 || gimple_references_memory_p (stmt
))
786 /* Returns true if STMT cannot be constant. */
789 surely_varying_stmt_p (gimple stmt
)
791 /* If the statement has operands that we cannot handle, it cannot be
793 if (gimple_has_volatile_ops (stmt
))
796 /* If it is a call and does not return a value or is not a
797 builtin and not an indirect call or a call to function with
798 assume_aligned/alloc_align attribute, it is varying. */
799 if (is_gimple_call (stmt
))
801 tree fndecl
, fntype
= gimple_call_fntype (stmt
);
802 if (!gimple_call_lhs (stmt
)
803 || ((fndecl
= gimple_call_fndecl (stmt
)) != NULL_TREE
804 && !DECL_BUILT_IN (fndecl
)
805 && !lookup_attribute ("assume_aligned",
806 TYPE_ATTRIBUTES (fntype
))
807 && !lookup_attribute ("alloc_align",
808 TYPE_ATTRIBUTES (fntype
))))
812 /* Any other store operation is not interesting. */
813 else if (gimple_vdef (stmt
))
816 /* Anything other than assignments and conditional jumps are not
817 interesting for CCP. */
818 if (gimple_code (stmt
) != GIMPLE_ASSIGN
819 && gimple_code (stmt
) != GIMPLE_COND
820 && gimple_code (stmt
) != GIMPLE_SWITCH
821 && gimple_code (stmt
) != GIMPLE_CALL
)
827 /* Initialize local data structures for CCP. */
830 ccp_initialize (void)
834 n_const_val
= num_ssa_names
;
835 const_val
= XCNEWVEC (ccp_prop_value_t
, n_const_val
);
837 /* Initialize simulation flags for PHI nodes and statements. */
838 FOR_EACH_BB_FN (bb
, cfun
)
840 gimple_stmt_iterator i
;
842 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
844 gimple stmt
= gsi_stmt (i
);
847 /* If the statement is a control insn, then we do not
848 want to avoid simulating the statement once. Failure
849 to do so means that those edges will never get added. */
850 if (stmt_ends_bb_p (stmt
))
853 is_varying
= surely_varying_stmt_p (stmt
);
860 /* If the statement will not produce a constant, mark
861 all its outputs VARYING. */
862 FOR_EACH_SSA_TREE_OPERAND (def
, stmt
, iter
, SSA_OP_ALL_DEFS
)
863 set_value_varying (def
);
865 prop_set_simulate_again (stmt
, !is_varying
);
869 /* Now process PHI nodes. We never clear the simulate_again flag on
870 phi nodes, since we do not know which edges are executable yet,
871 except for phi nodes for virtual operands when we do not do store ccp. */
872 FOR_EACH_BB_FN (bb
, cfun
)
876 for (i
= gsi_start_phis (bb
); !gsi_end_p (i
); gsi_next (&i
))
878 gphi
*phi
= i
.phi ();
880 if (virtual_operand_p (gimple_phi_result (phi
)))
881 prop_set_simulate_again (phi
, false);
883 prop_set_simulate_again (phi
, true);
/* Debug count support.  Reset the values of ssa names to
   VARYING when the total number of ssa names analyzed is
   beyond the debug count specified.  */
896 for (i
= 0; i
< num_ssa_names
; i
++)
900 const_val
[i
].lattice_val
= VARYING
;
901 const_val
[i
].mask
= -1;
902 const_val
[i
].value
= NULL_TREE
;
908 /* Do final substitution of propagated values, cleanup the flowgraph and
909 free allocated storage.
911 Return TRUE when something was optimized. */
916 bool something_changed
;
921 /* Derive alignment and misalignment information from partially
922 constant pointers in the lattice or nonzero bits from partially
923 constant integers. */
924 for (i
= 1; i
< num_ssa_names
; ++i
)
926 tree name
= ssa_name (i
);
927 ccp_prop_value_t
*val
;
928 unsigned int tem
, align
;
931 || (!POINTER_TYPE_P (TREE_TYPE (name
))
932 && (!INTEGRAL_TYPE_P (TREE_TYPE (name
))
933 /* Don't record nonzero bits before IPA to avoid
934 using too much memory. */
935 || first_pass_instance
)))
938 val
= get_value (name
);
939 if (val
->lattice_val
!= CONSTANT
940 || TREE_CODE (val
->value
) != INTEGER_CST
)
943 if (POINTER_TYPE_P (TREE_TYPE (name
)))
	  /* Trailing mask bits specify the alignment, trailing value
	     bits the misalignment.  */
	  tem = val->mask.to_uhwi ();
	  align = (tem & -tem);
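	  /* Illustrative example: a pointer value whose mask has the low
	     four bits clear (say 0xfffffff0) with value 0x4 yields
	     tem & -tem == 0x10, i.e. an alignment of 16 bytes with a
	     misalignment of 4 recorded for NAME.  */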
950 set_ptr_info_alignment (get_ptr_info (name
), align
,
951 (TREE_INT_CST_LOW (val
->value
)
956 unsigned int precision
= TYPE_PRECISION (TREE_TYPE (val
->value
));
957 wide_int nonzero_bits
= wide_int::from (val
->mask
, precision
,
958 UNSIGNED
) | val
->value
;
959 nonzero_bits
&= get_nonzero_bits (name
);
960 set_nonzero_bits (name
, nonzero_bits
);
964 /* Perform substitutions based on the known constant values. */
965 something_changed
= substitute_and_fold (get_constant_value
,
966 ccp_fold_stmt
, true);
  return something_changed;

/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

	any M UNDEFINED = any
	any M VARYING   = VARYING
	Ci  M Cj        = Ci       if (i == j)
	Ci  M Cj        = VARYING  if (i != j)  */
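
/* For instance (illustrative), meeting the INTEGER_CST lattice values
   3 and 7 (both with mask 0) gives value 3 with mask 4: the bit in
   which the two constants differ becomes unknown while the common bits
   stay known, instead of dropping straight to VARYING.  */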
984 ccp_lattice_meet (ccp_prop_value_t
*val1
, ccp_prop_value_t
*val2
)
986 if (val1
->lattice_val
== UNDEFINED
987 /* For UNDEFINED M SSA we can't always SSA because its definition
988 may not dominate the PHI node. Doing optimistic copy propagation
989 also causes a lot of gcc.dg/uninit-pred*.c FAILs. */
990 && (val2
->lattice_val
!= CONSTANT
991 || TREE_CODE (val2
->value
) != SSA_NAME
))
993 /* UNDEFINED M any = any */
996 else if (val2
->lattice_val
== UNDEFINED
998 && (val1
->lattice_val
!= CONSTANT
999 || TREE_CODE (val1
->value
) != SSA_NAME
))
1001 /* any M UNDEFINED = any
1002 Nothing to do. VAL1 already contains the value we want. */
1005 else if (val1
->lattice_val
== VARYING
1006 || val2
->lattice_val
== VARYING
)
1008 /* any M VARYING = VARYING. */
1009 val1
->lattice_val
= VARYING
;
1011 val1
->value
= NULL_TREE
;
1013 else if (val1
->lattice_val
== CONSTANT
1014 && val2
->lattice_val
== CONSTANT
1015 && TREE_CODE (val1
->value
) == INTEGER_CST
1016 && TREE_CODE (val2
->value
) == INTEGER_CST
)
1018 /* Ci M Cj = Ci if (i == j)
1019 Ci M Cj = VARYING if (i != j)
1021 For INTEGER_CSTs mask unequal bits. If no equal bits remain,
1023 val1
->mask
= (val1
->mask
| val2
->mask
1024 | (wi::to_widest (val1
->value
)
1025 ^ wi::to_widest (val2
->value
)));
1026 if (wi::sext (val1
->mask
, TYPE_PRECISION (TREE_TYPE (val1
->value
))) == -1)
1028 val1
->lattice_val
= VARYING
;
1029 val1
->value
= NULL_TREE
;
1032 else if (val1
->lattice_val
== CONSTANT
1033 && val2
->lattice_val
== CONSTANT
1034 && operand_equal_p (val1
->value
, val2
->value
, 0))
1036 /* Ci M Cj = Ci if (i == j)
1037 Ci M Cj = VARYING if (i != j)
1039 VAL1 already contains the value we want for equivalent values. */
1041 else if (val1
->lattice_val
== CONSTANT
1042 && val2
->lattice_val
== CONSTANT
1043 && (TREE_CODE (val1
->value
) == ADDR_EXPR
1044 || TREE_CODE (val2
->value
) == ADDR_EXPR
))
1046 /* When not equal addresses are involved try meeting for
1048 ccp_prop_value_t tem
= *val2
;
1049 if (TREE_CODE (val1
->value
) == ADDR_EXPR
)
1050 *val1
= get_value_for_expr (val1
->value
, true);
1051 if (TREE_CODE (val2
->value
) == ADDR_EXPR
)
1052 tem
= get_value_for_expr (val2
->value
, true);
1053 ccp_lattice_meet (val1
, &tem
);
1057 /* Any other combination is VARYING. */
1058 val1
->lattice_val
= VARYING
;
1060 val1
->value
= NULL_TREE
;
1065 /* Loop through the PHI_NODE's parameters for BLOCK and compare their
1066 lattice values to determine PHI_NODE's lattice value. The value of a
1067 PHI node is determined calling ccp_lattice_meet with all the arguments
1068 of the PHI node that are incoming via executable edges. */
1070 static enum ssa_prop_result
1071 ccp_visit_phi_node (gphi
*phi
)
1074 ccp_prop_value_t new_val
;
1076 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1078 fprintf (dump_file
, "\nVisiting PHI node: ");
1079 print_gimple_stmt (dump_file
, phi
, 0, dump_flags
);
1082 new_val
.lattice_val
= UNDEFINED
;
1083 new_val
.value
= NULL_TREE
;
1087 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
1089 /* Compute the meet operator over all the PHI arguments flowing
1090 through executable edges. */
1091 edge e
= gimple_phi_arg_edge (phi
, i
);
1093 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1096 "\n Argument #%d (%d -> %d %sexecutable)\n",
1097 i
, e
->src
->index
, e
->dest
->index
,
1098 (e
->flags
& EDGE_EXECUTABLE
) ? "" : "not ");
      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
1103 if (e
->flags
& EDGE_EXECUTABLE
)
1105 tree arg
= gimple_phi_arg (phi
, i
)->def
;
1106 ccp_prop_value_t arg_val
= get_value_for_expr (arg
, false);
1114 ccp_lattice_meet (&new_val
, &arg_val
);
1116 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1118 fprintf (dump_file
, "\t");
1119 print_generic_expr (dump_file
, arg
, dump_flags
);
1120 dump_lattice_value (dump_file
, "\tValue: ", arg_val
);
1121 fprintf (dump_file
, "\n");
1124 if (new_val
.lattice_val
== VARYING
)
1129 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1131 dump_lattice_value (dump_file
, "\n PHI node value: ", new_val
);
1132 fprintf (dump_file
, "\n\n");
1135 /* Make the transition to the new value. */
1136 if (set_lattice_value (gimple_phi_result (phi
), &new_val
))
1138 if (new_val
.lattice_val
== VARYING
)
1139 return SSA_PROP_VARYING
;
1141 return SSA_PROP_INTERESTING
;
1144 return SSA_PROP_NOT_INTERESTING
;
1147 /* Return the constant value for OP or OP otherwise. */
1150 valueize_op (tree op
)
1152 if (TREE_CODE (op
) == SSA_NAME
)
1154 tree tem
= get_constant_value (op
);
1161 /* Return the constant value for OP, but signal to not follow SSA
1162 edges if the definition may be simulated again. */
1165 valueize_op_1 (tree op
)
1167 if (TREE_CODE (op
) == SSA_NAME
)
1169 /* If the definition may be simulated again we cannot follow
1170 this SSA edge as the SSA propagator does not necessarily
1171 re-visit the use. */
1172 gimple def_stmt
= SSA_NAME_DEF_STMT (op
);
1173 if (!gimple_nop_p (def_stmt
)
1174 && prop_simulate_again_p (def_stmt
))
1176 tree tem
= get_constant_value (op
);
1183 /* CCP specific front-end to the non-destructive constant folding
1186 Attempt to simplify the RHS of STMT knowing that one or more
1187 operands are constants.
1189 If simplification is possible, return the simplified RHS,
1190 otherwise return the original RHS or NULL_TREE. */
1193 ccp_fold (gimple stmt
)
1195 location_t loc
= gimple_location (stmt
);
1196 switch (gimple_code (stmt
))
1200 /* Handle comparison operators that can appear in GIMPLE form. */
1201 tree op0
= valueize_op (gimple_cond_lhs (stmt
));
1202 tree op1
= valueize_op (gimple_cond_rhs (stmt
));
1203 enum tree_code code
= gimple_cond_code (stmt
);
1204 return fold_binary_loc (loc
, code
, boolean_type_node
, op0
, op1
);
1209 /* Return the constant switch index. */
1210 return valueize_op (gimple_switch_index (as_a
<gswitch
*> (stmt
)));
1215 return gimple_fold_stmt_to_constant_1 (stmt
,
1216 valueize_op
, valueize_op_1
);
1223 /* Apply the operation CODE in type TYPE to the value, mask pair
1224 RVAL and RMASK representing a value of type RTYPE and set
1225 the value, mask pair *VAL and *MASK to the result. */
1228 bit_value_unop_1 (enum tree_code code
, tree type
,
1229 widest_int
*val
, widest_int
*mask
,
1230 tree rtype
, const widest_int
&rval
, const widest_int
&rmask
)
1241 widest_int temv
, temm
;
1242 /* Return ~rval + 1. */
1243 bit_value_unop_1 (BIT_NOT_EXPR
, type
, &temv
, &temm
, type
, rval
, rmask
);
1244 bit_value_binop_1 (PLUS_EXPR
, type
, val
, mask
,
1245 type
, temv
, temm
, type
, 1, 0);
1253 /* First extend mask and value according to the original type. */
1254 sgn
= TYPE_SIGN (rtype
);
1255 *mask
= wi::ext (rmask
, TYPE_PRECISION (rtype
), sgn
);
1256 *val
= wi::ext (rval
, TYPE_PRECISION (rtype
), sgn
);
1258 /* Then extend mask and value according to the target type. */
1259 sgn
= TYPE_SIGN (type
);
1260 *mask
= wi::ext (*mask
, TYPE_PRECISION (type
), sgn
);
1261 *val
= wi::ext (*val
, TYPE_PRECISION (type
), sgn
);
1271 /* Apply the operation CODE in type TYPE to the value, mask pairs
1272 R1VAL, R1MASK and R2VAL, R2MASK representing a values of type R1TYPE
1273 and R2TYPE and set the value, mask pair *VAL and *MASK to the result. */
1276 bit_value_binop_1 (enum tree_code code
, tree type
,
1277 widest_int
*val
, widest_int
*mask
,
1278 tree r1type
, const widest_int
&r1val
,
1279 const widest_int
&r1mask
, tree r2type
,
1280 const widest_int
&r2val
, const widest_int
&r2mask
)
1282 signop sgn
= TYPE_SIGN (type
);
1283 int width
= TYPE_PRECISION (type
);
1284 bool swap_p
= false;
1286 /* Assume we'll get a constant result. Use an initial non varying
1287 value, we fall back to varying in the end if necessary. */
1293 /* The mask is constant where there is a known not
1294 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2)) */
1295 *mask
= (r1mask
| r2mask
) & (r1val
| r1mask
) & (r2val
| r2mask
);
1296 *val
= r1val
& r2val
;
1300 /* The mask is constant where there is a known
1301 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)). */
1302 *mask
= (r1mask
| r2mask
)
1303 .and_not (r1val
.and_not (r1mask
) | r2val
.and_not (r2mask
));
1304 *val
= r1val
| r2val
;
1309 *mask
= r1mask
| r2mask
;
1310 *val
= r1val
^ r2val
;
1317 widest_int shift
= r2val
;
1325 if (wi::neg_p (shift
))
1328 if (code
== RROTATE_EXPR
)
1329 code
= LROTATE_EXPR
;
1331 code
= RROTATE_EXPR
;
1333 if (code
== RROTATE_EXPR
)
1335 *mask
= wi::rrotate (r1mask
, shift
, width
);
1336 *val
= wi::rrotate (r1val
, shift
, width
);
1340 *mask
= wi::lrotate (r1mask
, shift
, width
);
1341 *val
= wi::lrotate (r1val
, shift
, width
);
1349 /* ??? We can handle partially known shift counts if we know
1350 its sign. That way we can tell that (x << (y | 8)) & 255
1354 widest_int shift
= r2val
;
1362 if (wi::neg_p (shift
))
1365 if (code
== RSHIFT_EXPR
)
1370 if (code
== RSHIFT_EXPR
)
1372 *mask
= wi::rshift (wi::ext (r1mask
, width
, sgn
), shift
, sgn
);
1373 *val
= wi::rshift (wi::ext (r1val
, width
, sgn
), shift
, sgn
);
1377 *mask
= wi::ext (wi::lshift (r1mask
, shift
), width
, sgn
);
1378 *val
= wi::ext (wi::lshift (r1val
, shift
), width
, sgn
);
1385 case POINTER_PLUS_EXPR
:
1387 /* Do the addition with unknown bits set to zero, to give carry-ins of
1388 zero wherever possible. */
1389 widest_int lo
= r1val
.and_not (r1mask
) + r2val
.and_not (r2mask
);
1390 lo
= wi::ext (lo
, width
, sgn
);
1391 /* Do the addition with unknown bits set to one, to give carry-ins of
1392 one wherever possible. */
1393 widest_int hi
= (r1val
| r1mask
) + (r2val
| r2mask
);
1394 hi
= wi::ext (hi
, width
, sgn
);
	/* Each bit in the result is known if (a) the corresponding bits in
	   both inputs are known, and (b) the carry-in to that bit position
	   is known.  We can check condition (b) by seeing if we got the same
	   result with minimised carries as with maximised carries.  */
	*mask = r1mask | r2mask | (lo ^ hi);
	*mask = wi::ext (*mask, width, sgn);
	/* It shouldn't matter whether we choose lo or hi here.  */
	*val = lo;
	break;
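	/* Worked example (illustrative): adding a value known to be 0x10
	   with mask 0x3 (low two bits unknown) to the constant 0x01 gives
	   lo = 0x11 and hi = 0x14, so the mask becomes
	   0x3 | (0x11 ^ 0x14) = 0x7 and only the bits above the low three
	   are known in the result.  */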
1408 widest_int temv
, temm
;
1409 bit_value_unop_1 (NEGATE_EXPR
, r2type
, &temv
, &temm
,
1410 r2type
, r2val
, r2mask
);
1411 bit_value_binop_1 (PLUS_EXPR
, type
, val
, mask
,
1412 r1type
, r1val
, r1mask
,
1413 r2type
, temv
, temm
);
1419 /* Just track trailing zeros in both operands and transfer
1420 them to the other. */
1421 int r1tz
= wi::ctz (r1val
| r1mask
);
1422 int r2tz
= wi::ctz (r2val
| r2mask
);
1423 if (r1tz
+ r2tz
>= width
)
1428 else if (r1tz
+ r2tz
> 0)
1430 *mask
= wi::ext (wi::mask
<widest_int
> (r1tz
+ r2tz
, true),
1440 widest_int m
= r1mask
| r2mask
;
1441 if (r1val
.and_not (m
) != r2val
.and_not (m
))
1444 *val
= ((code
== EQ_EXPR
) ? 0 : 1);
1448 /* We know the result of a comparison is always one or zero. */
1458 code
= swap_tree_comparison (code
);
1465 const widest_int
&o1val
= swap_p
? r2val
: r1val
;
1466 const widest_int
&o1mask
= swap_p
? r2mask
: r1mask
;
1467 const widest_int
&o2val
= swap_p
? r1val
: r2val
;
1468 const widest_int
&o2mask
= swap_p
? r1mask
: r2mask
;
1470 /* If the most significant bits are not known we know nothing. */
1471 if (wi::neg_p (o1mask
) || wi::neg_p (o2mask
))
1474 /* For comparisons the signedness is in the comparison operands. */
1475 sgn
= TYPE_SIGN (r1type
);
      /* If we know the most significant bits we know the value
	 ranges by means of treating varying bits as zero
	 or one.  Do a cross comparison of the max/min pairs.  */
      maxmin = wi::cmp (o1val | o1mask, o2val.and_not (o2mask), sgn);
      minmax = wi::cmp (o1val.and_not (o1mask), o2val | o2mask, sgn);
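      /* Illustrative example: if o1 has value 0 with mask 0x3 (so o1 is
	 somewhere in [0, 3]) and o2 is the constant 5, then
	 maxmin = cmp (3, 5) < 0, so o1 is known to be less than o2 and a
	 LT_EXPR comparison folds to true even though o1 is not fully
	 known.  */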
1482 if (maxmin
< 0) /* o1 is less than o2. */
1487 else if (minmax
> 0) /* o1 is not less or equal to o2. */
1492 else if (maxmin
== minmax
) /* o1 and o2 are equal. */
1494 /* This probably should never happen as we'd have
1495 folded the thing during fully constant value folding. */
1497 *val
= (code
== LE_EXPR
? 1 : 0);
1501 /* We know the result of a comparison is always one or zero. */
1512 /* Return the propagation value when applying the operation CODE to
1513 the value RHS yielding type TYPE. */
1515 static ccp_prop_value_t
1516 bit_value_unop (enum tree_code code
, tree type
, tree rhs
)
1518 ccp_prop_value_t rval
= get_value_for_expr (rhs
, true);
1519 widest_int value
, mask
;
1520 ccp_prop_value_t val
;
1522 if (rval
.lattice_val
== UNDEFINED
)
1525 gcc_assert ((rval
.lattice_val
== CONSTANT
1526 && TREE_CODE (rval
.value
) == INTEGER_CST
)
1527 || wi::sext (rval
.mask
, TYPE_PRECISION (TREE_TYPE (rhs
))) == -1);
1528 bit_value_unop_1 (code
, type
, &value
, &mask
,
1529 TREE_TYPE (rhs
), value_to_wide_int (rval
), rval
.mask
);
1530 if (wi::sext (mask
, TYPE_PRECISION (type
)) != -1)
1532 val
.lattice_val
= CONSTANT
;
1534 /* ??? Delay building trees here. */
1535 val
.value
= wide_int_to_tree (type
, value
);
1539 val
.lattice_val
= VARYING
;
1540 val
.value
= NULL_TREE
;
1546 /* Return the propagation value when applying the operation CODE to
1547 the values RHS1 and RHS2 yielding type TYPE. */
1549 static ccp_prop_value_t
1550 bit_value_binop (enum tree_code code
, tree type
, tree rhs1
, tree rhs2
)
1552 ccp_prop_value_t r1val
= get_value_for_expr (rhs1
, true);
1553 ccp_prop_value_t r2val
= get_value_for_expr (rhs2
, true);
1554 widest_int value
, mask
;
1555 ccp_prop_value_t val
;
1557 if (r1val
.lattice_val
== UNDEFINED
1558 || r2val
.lattice_val
== UNDEFINED
)
1560 val
.lattice_val
= VARYING
;
1561 val
.value
= NULL_TREE
;
1566 gcc_assert ((r1val
.lattice_val
== CONSTANT
1567 && TREE_CODE (r1val
.value
) == INTEGER_CST
)
1568 || wi::sext (r1val
.mask
,
1569 TYPE_PRECISION (TREE_TYPE (rhs1
))) == -1);
1570 gcc_assert ((r2val
.lattice_val
== CONSTANT
1571 && TREE_CODE (r2val
.value
) == INTEGER_CST
)
1572 || wi::sext (r2val
.mask
,
1573 TYPE_PRECISION (TREE_TYPE (rhs2
))) == -1);
1574 bit_value_binop_1 (code
, type
, &value
, &mask
,
1575 TREE_TYPE (rhs1
), value_to_wide_int (r1val
), r1val
.mask
,
1576 TREE_TYPE (rhs2
), value_to_wide_int (r2val
), r2val
.mask
);
1577 if (wi::sext (mask
, TYPE_PRECISION (type
)) != -1)
1579 val
.lattice_val
= CONSTANT
;
1581 /* ??? Delay building trees here. */
1582 val
.value
= wide_int_to_tree (type
, value
);
1586 val
.lattice_val
= VARYING
;
1587 val
.value
= NULL_TREE
;
1593 /* Return the propagation value for __builtin_assume_aligned
1594 and functions with assume_aligned or alloc_aligned attribute.
1595 For __builtin_assume_aligned, ATTR is NULL_TREE,
1596 for assume_aligned attribute ATTR is non-NULL and ALLOC_ALIGNED
1597 is false, for alloc_aligned attribute ATTR is non-NULL and
1598 ALLOC_ALIGNED is true. */
1600 static ccp_prop_value_t
1601 bit_value_assume_aligned (gimple stmt
, tree attr
, ccp_prop_value_t ptrval
,
1604 tree align
, misalign
= NULL_TREE
, type
;
1605 unsigned HOST_WIDE_INT aligni
, misaligni
= 0;
1606 ccp_prop_value_t alignval
;
1607 widest_int value
, mask
;
1608 ccp_prop_value_t val
;
1610 if (attr
== NULL_TREE
)
1612 tree ptr
= gimple_call_arg (stmt
, 0);
1613 type
= TREE_TYPE (ptr
);
1614 ptrval
= get_value_for_expr (ptr
, true);
1618 tree lhs
= gimple_call_lhs (stmt
);
1619 type
= TREE_TYPE (lhs
);
1622 if (ptrval
.lattice_val
== UNDEFINED
)
1624 gcc_assert ((ptrval
.lattice_val
== CONSTANT
1625 && TREE_CODE (ptrval
.value
) == INTEGER_CST
)
1626 || wi::sext (ptrval
.mask
, TYPE_PRECISION (type
)) == -1);
1627 if (attr
== NULL_TREE
)
1629 /* Get aligni and misaligni from __builtin_assume_aligned. */
1630 align
= gimple_call_arg (stmt
, 1);
1631 if (!tree_fits_uhwi_p (align
))
1633 aligni
= tree_to_uhwi (align
);
1634 if (gimple_call_num_args (stmt
) > 2)
1636 misalign
= gimple_call_arg (stmt
, 2);
1637 if (!tree_fits_uhwi_p (misalign
))
1639 misaligni
= tree_to_uhwi (misalign
);
1644 /* Get aligni and misaligni from assume_aligned or
1645 alloc_align attributes. */
1646 if (TREE_VALUE (attr
) == NULL_TREE
)
1648 attr
= TREE_VALUE (attr
);
1649 align
= TREE_VALUE (attr
);
1650 if (!tree_fits_uhwi_p (align
))
1652 aligni
= tree_to_uhwi (align
);
1655 if (aligni
== 0 || aligni
> gimple_call_num_args (stmt
))
1657 align
= gimple_call_arg (stmt
, aligni
- 1);
1658 if (!tree_fits_uhwi_p (align
))
1660 aligni
= tree_to_uhwi (align
);
1662 else if (TREE_CHAIN (attr
) && TREE_VALUE (TREE_CHAIN (attr
)))
1664 misalign
= TREE_VALUE (TREE_CHAIN (attr
));
1665 if (!tree_fits_uhwi_p (misalign
))
1667 misaligni
= tree_to_uhwi (misalign
);
1670 if (aligni
<= 1 || (aligni
& (aligni
- 1)) != 0 || misaligni
>= aligni
)
1673 align
= build_int_cst_type (type
, -aligni
);
1674 alignval
= get_value_for_expr (align
, true);
1675 bit_value_binop_1 (BIT_AND_EXPR
, type
, &value
, &mask
,
1676 type
, value_to_wide_int (ptrval
), ptrval
.mask
,
1677 type
, value_to_wide_int (alignval
), alignval
.mask
);
1678 if (wi::sext (mask
, TYPE_PRECISION (type
)) != -1)
1680 val
.lattice_val
= CONSTANT
;
1682 gcc_assert ((mask
.to_uhwi () & (aligni
- 1)) == 0);
1683 gcc_assert ((value
.to_uhwi () & (aligni
- 1)) == 0);
1685 /* ??? Delay building trees here. */
1686 val
.value
= wide_int_to_tree (type
, value
);
1690 val
.lattice_val
= VARYING
;
1691 val
.value
= NULL_TREE
;
1697 /* Evaluate statement STMT.
1698 Valid only for assignments, calls, conditionals, and switches. */
1700 static ccp_prop_value_t
1701 evaluate_stmt (gimple stmt
)
1703 ccp_prop_value_t val
;
1704 tree simplified
= NULL_TREE
;
1705 ccp_lattice_t likelyvalue
= likely_value (stmt
);
1706 bool is_constant
= false;
1709 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1711 fprintf (dump_file
, "which is likely ");
1712 switch (likelyvalue
)
1715 fprintf (dump_file
, "CONSTANT");
1718 fprintf (dump_file
, "UNDEFINED");
1721 fprintf (dump_file
, "VARYING");
1725 fprintf (dump_file
, "\n");
1728 /* If the statement is likely to have a CONSTANT result, then try
1729 to fold the statement to determine the constant value. */
1730 /* FIXME. This is the only place that we call ccp_fold.
1731 Since likely_value never returns CONSTANT for calls, we will
1732 not attempt to fold them, including builtins that may profit. */
1733 if (likelyvalue
== CONSTANT
)
1735 fold_defer_overflow_warnings ();
1736 simplified
= ccp_fold (stmt
);
1737 if (simplified
&& TREE_CODE (simplified
) == SSA_NAME
)
1739 val
= *get_value (simplified
);
1740 if (val
.lattice_val
!= VARYING
)
1742 fold_undefer_overflow_warnings (true, stmt
, 0);
1746 is_constant
= simplified
&& is_gimple_min_invariant (simplified
);
1747 fold_undefer_overflow_warnings (is_constant
, stmt
, 0);
1750 /* The statement produced a constant value. */
1751 val
.lattice_val
= CONSTANT
;
1752 val
.value
= simplified
;
1757 /* If the statement is likely to have a VARYING result, then do not
1758 bother folding the statement. */
1759 else if (likelyvalue
== VARYING
)
1761 enum gimple_code code
= gimple_code (stmt
);
1762 if (code
== GIMPLE_ASSIGN
)
1764 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
1766 /* Other cases cannot satisfy is_gimple_min_invariant
1768 if (get_gimple_rhs_class (subcode
) == GIMPLE_SINGLE_RHS
)
1769 simplified
= gimple_assign_rhs1 (stmt
);
1771 else if (code
== GIMPLE_SWITCH
)
1772 simplified
= gimple_switch_index (as_a
<gswitch
*> (stmt
));
1774 /* These cannot satisfy is_gimple_min_invariant without folding. */
1775 gcc_assert (code
== GIMPLE_CALL
|| code
== GIMPLE_COND
);
1776 is_constant
= simplified
&& is_gimple_min_invariant (simplified
);
1779 /* The statement produced a constant value. */
1780 val
.lattice_val
= CONSTANT
;
1781 val
.value
= simplified
;
1785 /* If the statement result is likely UNDEFINED, make it so. */
1786 else if (likelyvalue
== UNDEFINED
)
1788 val
.lattice_val
= UNDEFINED
;
1789 val
.value
= NULL_TREE
;
1794 /* Resort to simplification for bitwise tracking. */
1795 if (flag_tree_bit_ccp
1796 && (likelyvalue
== CONSTANT
|| is_gimple_call (stmt
)
1797 || (gimple_assign_single_p (stmt
)
1798 && gimple_assign_rhs_code (stmt
) == ADDR_EXPR
))
1801 enum gimple_code code
= gimple_code (stmt
);
1802 val
.lattice_val
= VARYING
;
1803 val
.value
= NULL_TREE
;
1805 if (code
== GIMPLE_ASSIGN
)
1807 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
1808 tree rhs1
= gimple_assign_rhs1 (stmt
);
1809 tree lhs
= gimple_assign_lhs (stmt
);
1810 if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs
))
1811 || POINTER_TYPE_P (TREE_TYPE (lhs
)))
1812 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1
))
1813 || POINTER_TYPE_P (TREE_TYPE (rhs1
))))
1814 switch (get_gimple_rhs_class (subcode
))
1816 case GIMPLE_SINGLE_RHS
:
1817 val
= get_value_for_expr (rhs1
, true);
1820 case GIMPLE_UNARY_RHS
:
1821 val
= bit_value_unop (subcode
, TREE_TYPE (lhs
), rhs1
);
1824 case GIMPLE_BINARY_RHS
:
1825 val
= bit_value_binop (subcode
, TREE_TYPE (lhs
), rhs1
,
1826 gimple_assign_rhs2 (stmt
));
1832 else if (code
== GIMPLE_COND
)
1834 enum tree_code code
= gimple_cond_code (stmt
);
1835 tree rhs1
= gimple_cond_lhs (stmt
);
1836 tree rhs2
= gimple_cond_rhs (stmt
);
1837 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1
))
1838 || POINTER_TYPE_P (TREE_TYPE (rhs1
)))
1839 val
= bit_value_binop (code
, TREE_TYPE (rhs1
), rhs1
, rhs2
);
1841 else if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
1843 tree fndecl
= gimple_call_fndecl (stmt
);
1844 switch (DECL_FUNCTION_CODE (fndecl
))
1846 case BUILT_IN_MALLOC
:
1847 case BUILT_IN_REALLOC
:
1848 case BUILT_IN_CALLOC
:
1849 case BUILT_IN_STRDUP
:
1850 case BUILT_IN_STRNDUP
:
1851 val
.lattice_val
= CONSTANT
;
1852 val
.value
= build_int_cst (TREE_TYPE (gimple_get_lhs (stmt
)), 0);
1853 val
.mask
= ~((HOST_WIDE_INT
) MALLOC_ABI_ALIGNMENT
1854 / BITS_PER_UNIT
- 1);
1857 case BUILT_IN_ALLOCA
:
1858 case BUILT_IN_ALLOCA_WITH_ALIGN
:
1859 align
= (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_ALLOCA_WITH_ALIGN
1860 ? TREE_INT_CST_LOW (gimple_call_arg (stmt
, 1))
1861 : BIGGEST_ALIGNMENT
);
1862 val
.lattice_val
= CONSTANT
;
1863 val
.value
= build_int_cst (TREE_TYPE (gimple_get_lhs (stmt
)), 0);
1864 val
.mask
= ~((HOST_WIDE_INT
) align
/ BITS_PER_UNIT
- 1);
1867 /* These builtins return their first argument, unmodified. */
1868 case BUILT_IN_MEMCPY
:
1869 case BUILT_IN_MEMMOVE
:
1870 case BUILT_IN_MEMSET
:
1871 case BUILT_IN_STRCPY
:
1872 case BUILT_IN_STRNCPY
:
1873 case BUILT_IN_MEMCPY_CHK
:
1874 case BUILT_IN_MEMMOVE_CHK
:
1875 case BUILT_IN_MEMSET_CHK
:
1876 case BUILT_IN_STRCPY_CHK
:
1877 case BUILT_IN_STRNCPY_CHK
:
1878 val
= get_value_for_expr (gimple_call_arg (stmt
, 0), true);
1881 case BUILT_IN_ASSUME_ALIGNED
:
1882 val
= bit_value_assume_aligned (stmt
, NULL_TREE
, val
, false);
1885 case BUILT_IN_ALIGNED_ALLOC
:
1887 tree align
= get_constant_value (gimple_call_arg (stmt
, 0));
1889 && tree_fits_uhwi_p (align
))
1891 unsigned HOST_WIDE_INT aligni
= tree_to_uhwi (align
);
		/* align must be a power of two.  */
1894 && (aligni
& (aligni
- 1)) == 0)
1896 val
.lattice_val
= CONSTANT
;
1897 val
.value
= build_int_cst (ptr_type_node
, 0);
1907 if (is_gimple_call (stmt
) && gimple_call_lhs (stmt
))
1909 tree fntype
= gimple_call_fntype (stmt
);
1912 tree attrs
= lookup_attribute ("assume_aligned",
1913 TYPE_ATTRIBUTES (fntype
));
1915 val
= bit_value_assume_aligned (stmt
, attrs
, val
, false);
1916 attrs
= lookup_attribute ("alloc_align",
1917 TYPE_ATTRIBUTES (fntype
));
1919 val
= bit_value_assume_aligned (stmt
, attrs
, val
, true);
1922 is_constant
= (val
.lattice_val
== CONSTANT
);
1925 if (flag_tree_bit_ccp
1926 && ((is_constant
&& TREE_CODE (val
.value
) == INTEGER_CST
)
1928 && gimple_get_lhs (stmt
)
1929 && TREE_CODE (gimple_get_lhs (stmt
)) == SSA_NAME
)
1931 tree lhs
= gimple_get_lhs (stmt
);
1932 wide_int nonzero_bits
= get_nonzero_bits (lhs
);
1933 if (nonzero_bits
!= -1)
1937 val
.lattice_val
= CONSTANT
;
1938 val
.value
= build_zero_cst (TREE_TYPE (lhs
));
1939 val
.mask
= extend_mask (nonzero_bits
);
1944 if (wi::bit_and_not (val
.value
, nonzero_bits
) != 0)
1945 val
.value
= wide_int_to_tree (TREE_TYPE (lhs
),
1946 nonzero_bits
& val
.value
);
1947 if (nonzero_bits
== 0)
1950 val
.mask
= val
.mask
& extend_mask (nonzero_bits
);
1955 /* The statement produced a nonconstant value. */
1958 /* The statement produced a copy. */
1959 if (simplified
&& TREE_CODE (simplified
) == SSA_NAME
1960 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (simplified
))
1962 val
.lattice_val
= CONSTANT
;
1963 val
.value
= simplified
;
1966 /* The statement is VARYING. */
1969 val
.lattice_val
= VARYING
;
1970 val
.value
= NULL_TREE
;
1978 typedef hash_table
<pointer_hash
<gimple_statement_base
> > gimple_htab
;
1980 /* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
1981 each matching BUILT_IN_STACK_RESTORE. Mark visited phis in VISITED. */
1984 insert_clobber_before_stack_restore (tree saved_val
, tree var
,
1985 gimple_htab
**visited
)
1988 gassign
*clobber_stmt
;
1990 imm_use_iterator iter
;
1991 gimple_stmt_iterator i
;
1994 FOR_EACH_IMM_USE_STMT (stmt
, iter
, saved_val
)
1995 if (gimple_call_builtin_p (stmt
, BUILT_IN_STACK_RESTORE
))
1997 clobber
= build_constructor (TREE_TYPE (var
),
1999 TREE_THIS_VOLATILE (clobber
) = 1;
2000 clobber_stmt
= gimple_build_assign (var
, clobber
);
2002 i
= gsi_for_stmt (stmt
);
2003 gsi_insert_before (&i
, clobber_stmt
, GSI_SAME_STMT
);
2005 else if (gimple_code (stmt
) == GIMPLE_PHI
)
2008 *visited
= new gimple_htab (10);
2010 slot
= (*visited
)->find_slot (stmt
, INSERT
);
2015 insert_clobber_before_stack_restore (gimple_phi_result (stmt
), var
,
2018 else if (gimple_assign_ssa_name_copy_p (stmt
))
2019 insert_clobber_before_stack_restore (gimple_assign_lhs (stmt
), var
,
2021 else if (chkp_gimple_call_builtin_p (stmt
, BUILT_IN_CHKP_BNDRET
))
2024 gcc_assert (is_gimple_debug (stmt
));
2027 /* Advance the iterator to the previous non-debug gimple statement in the same
2028 or dominating basic block. */
2031 gsi_prev_dom_bb_nondebug (gimple_stmt_iterator
*i
)
2035 gsi_prev_nondebug (i
);
2036 while (gsi_end_p (*i
))
2038 dom
= get_immediate_dominator (CDI_DOMINATORS
, i
->bb
);
2039 if (dom
== NULL
|| dom
== ENTRY_BLOCK_PTR_FOR_FN (cfun
))
2042 *i
= gsi_last_bb (dom
);
/* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
   a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.

   It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator when
   a previous pass (such as DOM) duplicated it along multiple paths to a BB.
   In that case the function gives up without inserting the clobbers.  */
2054 insert_clobbers_for_var (gimple_stmt_iterator i
, tree var
)
2058 gimple_htab
*visited
= NULL
;
2060 for (; !gsi_end_p (i
); gsi_prev_dom_bb_nondebug (&i
))
2062 stmt
= gsi_stmt (i
);
2064 if (!gimple_call_builtin_p (stmt
, BUILT_IN_STACK_SAVE
))
2067 saved_val
= gimple_call_lhs (stmt
);
2068 if (saved_val
== NULL_TREE
)
2071 insert_clobber_before_stack_restore (saved_val
, var
, &visited
);
/* Detects a __builtin_alloca_with_align with constant size argument.  Declares
   a fixed-size array and returns the address, if found, otherwise returns
   NULL_TREE.  */
2083 fold_builtin_alloca_with_align (gimple stmt
)
2085 unsigned HOST_WIDE_INT size
, threshold
, n_elem
;
2086 tree lhs
, arg
, block
, var
, elem_type
, array_type
;
2089 lhs
= gimple_call_lhs (stmt
);
2090 if (lhs
== NULL_TREE
)
2093 /* Detect constant argument. */
2094 arg
= get_constant_value (gimple_call_arg (stmt
, 0));
2095 if (arg
== NULL_TREE
2096 || TREE_CODE (arg
) != INTEGER_CST
2097 || !tree_fits_uhwi_p (arg
))
2100 size
= tree_to_uhwi (arg
);
2102 /* Heuristic: don't fold large allocas. */
2103 threshold
= (unsigned HOST_WIDE_INT
)PARAM_VALUE (PARAM_LARGE_STACK_FRAME
);
2104 /* In case the alloca is located at function entry, it has the same lifetime
2105 as a declared array, so we allow a larger size. */
2106 block
= gimple_block (stmt
);
2107 if (!(cfun
->after_inlining
2108 && TREE_CODE (BLOCK_SUPERCONTEXT (block
)) == FUNCTION_DECL
))
2110 if (size
> threshold
)
2113 /* Declare array. */
2114 elem_type
= build_nonstandard_integer_type (BITS_PER_UNIT
, 1);
2115 n_elem
= size
* 8 / BITS_PER_UNIT
;
2116 array_type
= build_array_type_nelts (elem_type
, n_elem
);
2117 var
= create_tmp_var (array_type
);
2118 DECL_ALIGN (var
) = TREE_INT_CST_LOW (gimple_call_arg (stmt
, 1));
2120 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (lhs
);
2121 if (pi
!= NULL
&& !pi
->pt
.anything
)
2125 singleton_p
= pt_solution_singleton_p (&pi
->pt
, &uid
);
2126 gcc_assert (singleton_p
);
2127 SET_DECL_PT_UID (var
, uid
);
  /* Fold alloca to the address of the array.  */
  return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
}
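
/* For instance (illustrative), a __builtin_alloca_with_align call whose
   size argument has been propagated to the constant 16 is folded into
   the address of a freshly declared 16-byte local array with the
   alignment requested by the second argument, so later passes can treat
   the storage like any other fixed-size local variable.  */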
/* Fold the stmt at *GSI with CCP specific information that propagating
   and regular folding does not catch.  */

static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	ccp_prop_value_t val;
	/* Statement evaluation will handle type mismatches in constants
	   more gracefully than the final propagation.  This allows us to
	   fold more conditionals here.  */
	val = evaluate_stmt (stmt);
	if (val.lattice_val != CONSTANT
	    || TREE_CODE (val.value) != INTEGER_CST)
	  return false;

	if (dump_file)
	  {
	    fprintf (dump_file, "Folding predicate ");
	    print_gimple_expr (dump_file, stmt, 0, 0);
	    fprintf (dump_file, " to ");
	    print_generic_expr (dump_file, val.value, 0);
	    fprintf (dump_file, "\n");
	  }

	if (integer_zerop (val.value))
	  gimple_cond_make_false (cond_stmt);
	else
	  gimple_cond_make_true (cond_stmt);

	return true;
      }

    case GIMPLE_CALL:
      {
	tree lhs = gimple_call_lhs (stmt);
	int flags = gimple_call_flags (stmt);
	tree val;
	tree argt;
	bool changed = false;
	unsigned i;

	/* If the call was folded into a constant make sure it goes
	   away even if we cannot propagate into all uses because of
	   type issues.  */
	if (lhs
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs))
	    /* Don't optimize away calls that have side-effects.  */
	    && (flags & (ECF_CONST|ECF_PURE)) != 0
	    && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
	  {
	    tree new_rhs = unshare_expr (val);
	    bool res;
	    if (!useless_type_conversion_p (TREE_TYPE (lhs),
					    TREE_TYPE (new_rhs)))
	      new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	    res = update_call_from_tree (gsi, new_rhs);
	    gcc_assert (res);
	    return true;
	  }

	/* Internal calls provide no argument types, so the extra laxity
	   for normal calls does not apply.  */
	if (gimple_call_internal_p (stmt))
	  return false;

	/* The heuristic of fold_builtin_alloca_with_align differs before and
	   after inlining, so we don't require the arg to be changed into a
	   constant for folding, but just to be constant.  */
	if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
	  {
	    tree new_rhs = fold_builtin_alloca_with_align (stmt);
	    if (new_rhs)
	      {
		bool res = update_call_from_tree (gsi, new_rhs);
		tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 0);
		gcc_assert (res);
		insert_clobbers_for_var (*gsi, var);
		return true;
	      }
	  }

	/* Propagate into the call arguments.  Compared to replace_uses_in
	   this can use the argument slot types for type verification
	   instead of the current argument type.  We also can safely
	   drop qualifiers here as we are dealing with constants anyway.  */
	argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
	for (i = 0; i < gimple_call_num_args (stmt) && argt;
	     ++i, argt = TREE_CHAIN (argt))
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (TREE_CODE (arg) == SSA_NAME
		&& (val = get_constant_value (arg))
		&& useless_type_conversion_p
		     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
		      TYPE_MAIN_VARIANT (TREE_TYPE (val))))
	      {
		gimple_call_set_arg (stmt, i, unshare_expr (val));
		changed = true;
	      }
	  }

	return changed;
      }

    case GIMPLE_ASSIGN:
      {
	tree lhs = gimple_assign_lhs (stmt);
	tree val;

	/* If we have a load that turned out to be constant replace it
	   as we cannot propagate into all uses in all cases.  */
	if (gimple_assign_single_p (stmt)
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs)))
	  {
	    tree rhs = unshare_expr (val);
	    if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	      rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
	    gimple_assign_set_rhs_from_tree (gsi, rhs);
	    return true;
	  }

	return false;
      }

    default:
      return false;
    }
}
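/* Illustration only (hypothetical GIMPLE): if the lattice says x_2 is the
   constant 0, a conditional such as

     if (x_2 != 0) goto <bb 3>; else goto <bb 4>;

   is rewritten by gimple_cond_make_false into a trivially false condition,
   after which CFG cleanup removes the dead edge and, eventually, the dead
   block.  Similarly, a const or pure call whose result is known to be
   constant is replaced outright by that constant.  */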
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  ccp_prop_value_t val;
  enum ssa_prop_result retval = SSA_PROP_NOT_INTERESTING;

  tree lhs = gimple_get_lhs (stmt);
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* Evaluate the statement, which could be
	 either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
      val = evaluate_stmt (stmt);

      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, &val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}
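/* Illustration only (hypothetical): if y_2 is already known to be CONSTANT 3,
   then for the statement x_5 = y_2 + 4 evaluate_stmt yields the constant 7.
   set_lattice_value moves x_5 from UNDEFINED to CONSTANT 7, the statement is
   reported as SSA_PROP_INTERESTING, and the propagation engine re-simulates
   the statements that use x_5.  */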
/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  ccp_prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);
  if (val.lattice_val != CONSTANT
      || TREE_CODE (val.value) != INTEGER_CST)
    return SSA_PROP_VARYING;

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = find_taken_edge (block, val.value);
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
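/* Illustration only (hypothetical GIMPLE): if pred_7 is known to be the
   constant 0, then for

     if (pred_7 != 0) goto <bb 5>; else goto <bb 6>;

   find_taken_edge returns the edge to <bb 6>; only that successor is added
   to the simulation worklist, and the edge to <bb 5> stays non-executable,
   so PHI nodes in <bb 5> can ignore values flowing through it.  */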
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
	 output value, evaluate its RHS to see if the lattice value of
	 its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
	return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
	 which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
	 computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    set_value_varying (def);

  return SSA_PROP_VARYING;
}
/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  unsigned int todo = 0;

  calculate_dominance_info (CDI_DOMINATORS);
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    todo = (TODO_cleanup_cfg | TODO_update_ssa);
  free_dominance_info (CDI_DOMINATORS);
  return todo;
}
namespace {

const pass_data pass_data_ccp =
{
  GIMPLE_PASS, /* type */
  "ccp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CCP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

class pass_ccp : public gimple_opt_pass
{
public:
  pass_ccp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_ccp, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_ccp (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_ccp != 0; }
  virtual unsigned int execute (function *) { return do_ssa_ccp (); }

}; // class pass_ccp

} // anon namespace

gimple_opt_pass *
make_pass_ccp (gcc::context *ctxt)
{
  return new pass_ccp (ctxt);
}
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple stmt;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
	  || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	  /* All regular builtins are ok, just obviously not alloca.  */
	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	goto second_stack_restore;
    }

  /* Allow one successor of the exit block, or zero successors.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }

 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
	{
	  callee = gimple_call_fndecl (stack_save);
	  if (callee
	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
	    {
	      gimple_stmt_iterator stack_save_gsi;
	      tree rhs;

	      stack_save_gsi = gsi_for_stmt (stack_save);
	      rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
	      update_call_from_tree (&stack_save_gsi, rhs);
	    }
	}
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
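/* Illustration only (hypothetical GIMPLE): in a block like

     saved_2 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_2);    <- candidate
     ...                                   <- no calls or asms here
     __builtin_stack_restore (saved_9);

   the candidate restore has no observable effect, because nothing between
   the two restores can allocate stack space, so it is replaced by a no-op;
   if saved_2 has no other uses, the matching __builtin_stack_save call is
   turned into a constant as well and later deleted.  */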
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
				 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
/* Attempt to make the block of __builtin_unreachable I unreachable by changing
   the incoming jumps.  Return true if at least one jump was changed.  */

static bool
optimize_unreachable (gimple_stmt_iterator i)
{
  basic_block bb = gsi_bb (i);
  gimple_stmt_iterator gsi;
  gimple stmt;
  edge_iterator ei;
  edge e;
  bool ret;

  if (flag_sanitize & SANITIZE_UNREACHABLE)
    return false;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  /* Verify we do not need to preserve the label.  */
	  if (FORCED_LABEL (gimple_label_label (label_stmt)))
	    return false;

	  continue;
	}

      /* Only handle the case that __builtin_unreachable is the first statement
	 in the block.  We rely on DCE to remove stmts without side-effects
	 before __builtin_unreachable.  */
      if (gsi_stmt (gsi) != gsi_stmt (i))
	return false;
    }

  ret = false;
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      if (gsi_end_p (gsi))
	continue;

      stmt = gsi_stmt (gsi);
      if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
	{
	  if (e->flags & EDGE_TRUE_VALUE)
	    gimple_cond_make_false (cond_stmt);
	  else if (e->flags & EDGE_FALSE_VALUE)
	    gimple_cond_make_true (cond_stmt);
	  else
	    gcc_unreachable ();
	  update_stmt (cond_stmt);
	}
      else
	{
	  /* Todo: handle other cases, f.i. switch statement.  */
	  continue;
	}

      ret = true;
    }

  return ret;
}
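/* Illustration only (hypothetical GIMPLE): if <bb 7> begins with
   __builtin_unreachable () and one of its predecessors ends in

     if (x_1 > 10) goto <bb 7>; else goto <bb 8>;

   then, since the true edge leads to the unreachable block, the condition
   is forced to false.  The jump into <bb 7> can never be taken any more,
   and a later CFG cleanup deletes the block.  */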
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

namespace {

const pass_data pass_data_fold_builtins =
{
  GIMPLE_PASS, /* type */
  "fab", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_fold_builtins : public gimple_opt_pass
{
public:
  pass_fold_builtins (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fold_builtins, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fold_builtins (m_ctxt); }
  virtual unsigned int execute (function *);

}; // class pass_fold_builtins
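/* Illustration only (hypothetical examples of the simplifications performed
   by execute below):

     t_1 = __builtin_constant_p (n_3);           ->  t_1 = 0;   (still not constant)
     p_2 = __builtin_assume_aligned (q_4, 16);   ->  p_2 = q_4;
     __builtin_va_end (&ap);                     ->  folded away

   together with the __builtin_stack_restore and __builtin_unreachable
   optimizations defined above.  */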
unsigned int
pass_fold_builtins::execute (function *fun)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple stmt, old_stmt;
	  tree callee;
	  enum built_in_function fcode;

	  stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      /* Remove all *ssaname_N ={v} {CLOBBER}; stmts,
		 after the last GIMPLE DSE they aren't needed and might
		 unnecessarily keep the SSA_NAMEs live.  */
	      if (gimple_clobber_p (stmt))
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  if (TREE_CODE (lhs) == MEM_REF
		      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
		    {
		      unlink_stmt_vdef (stmt);
		      gsi_remove (&i, true);
		      release_defs (stmt);
		      continue;
		    }
		}
	      gsi_next (&i);
	      continue;
	    }

	  callee = gimple_call_fndecl (stmt);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      gsi_next (&i);
	      continue;
	    }

	  fcode = DECL_FUNCTION_CODE (callee);

	  tree result = NULL_TREE;
	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_CONSTANT_P:
	      /* Resolve __builtin_constant_p.  If it hasn't been
		 folded to integer_one_node by now, it's fairly
		 certain that the value simply isn't constant.  */
	      result = integer_zero_node;
	      break;

	    case BUILT_IN_ASSUME_ALIGNED:
	      /* Remove __builtin_assume_aligned.  */
	      result = gimple_call_arg (stmt, 0);
	      break;

	    case BUILT_IN_STACK_RESTORE:
	      result = optimize_stack_restore (i);
	      if (result)
		break;
	      gsi_next (&i);
	      continue;

	    case BUILT_IN_UNREACHABLE:
	      if (optimize_unreachable (i))
		cfg_changed = true;
	      break;

	    case BUILT_IN_VA_START:
	    case BUILT_IN_VA_END:
	    case BUILT_IN_VA_COPY:
	      /* These shouldn't be folded before pass_stdarg.  */
	      result = optimize_stdarg_builtin (stmt);
	      break;

	    default:
	      break;
	    }

	  if (result == NULL_TREE)
	    {
	      gsi_next (&i);
	      continue;
	    }

	  if (!update_call_from_tree (&i, result))
	    gimplify_and_update_call_from_tree (&i, result);

	  todoflags |= TODO_update_address_taken;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  old_stmt = stmt;
	  stmt = gsi_stmt (i);

	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin, there might be new opportunities now.  */
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    gsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}

} // anon namespace

gimple_opt_pass *
make_pass_fold_builtins (gcc::context *ctxt)
{
  return new pass_fold_builtins (ctxt);
}