/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000-2018 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

        UNINITIALIZED   ->  the initial state of the value.  This value
                            is replaced with a correct initial value
                            the first time the value is used, so the
                            rest of the pass does not need to care about
                            it.  Using this value simplifies initialization
                            of the pass, and prevents us from needlessly
                            scanning statements that are never reached.

        UNDEFINED       ->  V_i is a local variable whose definition
                            has not been processed yet.  Therefore we
                            don't yet know if its value is a constant
                            or not.

        CONSTANT        ->  V_i has been found to hold a constant
                            value C.

        VARYING         ->  V_i cannot take a constant value, or if it
                            does, it is not possible to determine it
                            at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
        is ignored.  This is useful in cases like this:

                        if (PRED)
                          a_9 = 3;
                        else
                          a_10 = 100;
                        a_11 = PHI (a_9, a_10)

        If PRED is known to always evaluate to false, then we can
        assume that a_11 will always take its value from a_10, meaning
        that instead of considering it VARYING (a_9 and a_10 have
        different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
        the outcome of the meet operation.  If a variable V_i has an
        UNDEFINED value, it means that either its defining statement
        hasn't been visited yet or V_i has no defining statement, in
        which case the original symbol 'V' is being used
        uninitialized.  Since 'V' is a local variable, the compiler
        may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   This algorithm uses wide-ints at the max precision of the target.
   This means that, with one uninteresting exception, variables with
   UNSIGNED types never go to VARYING because the bits above the
   precision of the type of the variable are always zero.  The
   uninteresting case is a variable of UNSIGNED type that has the
   maximum precision of the target.  Such variables can go to VARYING,
   but this causes no loss of information since these variables will
   never be extended.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
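
/* As a small illustration (not taken from the references above), CCP
   reduces a function like

       int
       foo (int flag)
       {
         int x = 4, y;
         if (flag)
           y = x + 1;
         else
           y = 5;
         return y * 2;
       }

   to "return 10;": both assignments feeding the PHI node for y evaluate
   to the constant 5, so the meet operation keeps y CONSTANT and the
   multiplication then folds to 10.  */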
#include "coretypes.h"
#include "tree-pass.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-propagate.h"
#include "builtins.h"
#include "stor-layout.h"
#include "optabs-query.h"
#include "tree-ssa-ccp.h"
#include "tree-dfa.h"
#include "diagnostic-core.h"
#include "stringpool.h"
#include "tree-vector-builder.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

struct ccp_prop_value_t {
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For X
       with a CONSTANT lattice value X & ~mask == value & ~mask.  The
       zero bits in the mask cover constant values.  The ones mean no
       information.  */
    widest_int mask;
};

class ccp_propagate : public ssa_propagation_engine
{
 public:
  enum ssa_prop_result visit_stmt (gimple *, edge *, tree *) FINAL OVERRIDE;
  enum ssa_prop_result visit_phi (gphi *) FINAL OVERRIDE;
};

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static ccp_prop_value_t *const_val;
static unsigned n_const_val;

static void canonicalize_value (ccp_prop_value_t *);
static void ccp_lattice_meet (ccp_prop_value_t *, ccp_prop_value_t *);
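
/* Illustration of the value/mask encoding (not part of the original
   comments): a pointer known only to be 8-byte aligned is tracked as
   value == 0 with mask == 7 -- the low three bits are known to be
   zero and every other bit is unknown.  A fully known constant has
   mask == 0, while mask == -1 carries no bit information at all.  */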
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, ccp_prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      if (TREE_CODE (val.value) != INTEGER_CST
          || val.mask == 0)
        {
          fprintf (outf, "%sCONSTANT ", prefix);
          print_generic_expr (outf, val.value, dump_flags);
        }
      else
        {
          widest_int cval = wi::bit_and_not (wi::to_widest (val.value),
                                             val.mask);
          fprintf (outf, "%sCONSTANT ", prefix);
          print_hex (cval, outf);
          fprintf (outf, " (");
          print_hex (val.mask, outf);
          fprintf (outf, ")");
        }
      break;
    default:
      gcc_unreachable ();
    }
}

/* Print lattice value VAL to stderr.  */

void debug_lattice_value (ccp_prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (ccp_prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}

/* Extend NONZERO_BITS to a full mask, based on sgn.  */

static widest_int
extend_mask (const wide_int &nonzero_bits, signop sgn)
{
  return widest_int::from (nonzero_bits, sgn);
}
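
/* For example (illustration only), an unsigned SSA name whose
   get_nonzero_bits result is 0xff enters the lattice below as value 0
   with mask extend_mask (0xff, UNSIGNED) == 0xff: every bit above the
   low eight is known to be zero, the low eight are unknown.  */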
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static ccp_prop_value_t
get_default_value (tree var)
{
  ccp_prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
  gimple *stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
         before being initialized.  If VAR is a local variable, we
         can assume initially that it is UNDEFINED, otherwise we must
         consider it VARYING.  */
      if (!virtual_operand_p (var)
          && SSA_NAME_VAR (var)
          && TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        val.lattice_val = UNDEFINED;
      else
        {
          val.lattice_val = VARYING;
          val.mask = -1;
          if (flag_tree_bit_ccp)
            {
              wide_int nonzero_bits = get_nonzero_bits (var);
              if (nonzero_bits != -1)
                {
                  val.lattice_val = CONSTANT;
                  val.value = build_zero_cst (TREE_TYPE (var));
                  val.mask = extend_mask (nonzero_bits,
                                          TYPE_SIGN (TREE_TYPE (var)));
                }
            }
        }
    }
  else if (is_gimple_assign (stmt))
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
          && DECL_P (gimple_assign_rhs1 (stmt))
          && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
        {
          val.lattice_val = CONSTANT;
          val.value = cst;
        }
      else
        {
          /* Any other variable defined by an assignment is considered
             UNDEFINED.  */
          val.lattice_val = UNDEFINED;
        }
    }
  else if ((is_gimple_call (stmt)
            && gimple_call_lhs (stmt) != NULL_TREE)
           || gimple_code (stmt) == GIMPLE_PHI)
    {
      /* A variable defined by a call or a PHI node is considered
         UNDEFINED.  */
      val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = -1;
    }

  return val;
}
/* Get the constant value associated with variable VAR.  */

static inline ccp_prop_value_t *
get_value (tree var)
{
  ccp_prop_value_t *val;

  if (const_val == NULL
      || SSA_NAME_VERSION (var) >= n_const_val)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_value (val);

  return val;
}
/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  ccp_prop_value_t *val;
  if (TREE_CODE (var) != SSA_NAME)
    {
      if (is_gimple_min_invariant (var))
        return var;
      return NULL_TREE;
    }
  val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
          || val->mask == 0))
    return val->value;
  return NULL_TREE;
}
/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  ccp_prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = -1;
}

/* For integer constants, make sure to drop TREE_OVERFLOW.  */

static void
canonicalize_value (ccp_prop_value_t *val)
{
  if (val->lattice_val != CONSTANT)
    return;

  if (TREE_OVERFLOW_P (val->value))
    val->value = drop_tree_overflow (val->value);
}
/* Return whether the lattice transition is valid.  */

static bool
valid_lattice_transition (ccp_prop_value_t old_val, ccp_prop_value_t new_val)
{
  /* Lattice transitions must always be monotonically increasing in
     value.  */
  if (old_val.lattice_val < new_val.lattice_val)
    return true;

  if (old_val.lattice_val != new_val.lattice_val)
    return false;

  if (!old_val.value && !new_val.value)
    return true;

  /* Now both lattice values are CONSTANT.  */

  /* Allow arbitrary copy changes as we might look through PHI <a_1, ...>
     when only a single copy edge is executable.  */
  if (TREE_CODE (old_val.value) == SSA_NAME
      && TREE_CODE (new_val.value) == SSA_NAME)
    return true;

  /* Allow transitioning from a constant to a copy.  */
  if (is_gimple_min_invariant (old_val.value)
      && TREE_CODE (new_val.value) == SSA_NAME)
    return true;

  /* Allow transitioning from PHI <&x, not executable> == &x
     to PHI <&x, &y> == common alignment.  */
  if (TREE_CODE (old_val.value) != INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return true;

  /* Bit-lattices have to agree in the still valid bits.  */
  if (TREE_CODE (old_val.value) == INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return (wi::bit_and_not (wi::to_widest (old_val.value), new_val.mask)
            == wi::bit_and_not (wi::to_widest (new_val.value), new_val.mask));

  /* Otherwise constant values have to agree.  */
  if (operand_equal_p (old_val.value, new_val.value, 0))
    return true;

  /* At least the kinds and types should agree now.  */
  if (TREE_CODE (old_val.value) != TREE_CODE (new_val.value)
      || !types_compatible_p (TREE_TYPE (old_val.value),
                              TREE_TYPE (new_val.value)))
    return false;

  /* For floats and !HONOR_NANS allow transitions from (partial) NaN
     to non-NaN.  */
  tree type = TREE_TYPE (new_val.value);
  if (SCALAR_FLOAT_TYPE_P (type)
      && !HONOR_NANS (type))
    {
      if (REAL_VALUE_ISNAN (TREE_REAL_CST (old_val.value)))
        return true;
    }
  else if (VECTOR_FLOAT_TYPE_P (type)
           && !HONOR_NANS (type))
    {
      unsigned int count
        = tree_vector_builder::binary_encoded_nelts (old_val.value,
                                                     new_val.value);
      for (unsigned int i = 0; i < count; ++i)
        if (!REAL_VALUE_ISNAN
               (TREE_REAL_CST (VECTOR_CST_ENCODED_ELT (old_val.value, i)))
            && !operand_equal_p (VECTOR_CST_ENCODED_ELT (old_val.value, i),
                                 VECTOR_CST_ENCODED_ELT (new_val.value, i), 0))
          return false;
      return true;
    }
  else if (COMPLEX_FLOAT_TYPE_P (type)
           && !HONOR_NANS (type))
    {
      if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_REALPART (old_val.value)))
          && !operand_equal_p (TREE_REALPART (old_val.value),
                               TREE_REALPART (new_val.value), 0))
        return false;
      if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_IMAGPART (old_val.value)))
          && !operand_equal_p (TREE_IMAGPART (old_val.value),
                               TREE_IMAGPART (new_val.value), 0))
        return false;
      return true;
    }
  return false;
}
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, ccp_prop_value_t *new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  ccp_prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_value (new_val);

  /* We have to be careful to not go up the bitwise lattice
     represented by the mask.  Instead of dropping to VARYING
     use the meet operator to retain a conservative value.
     Missed optimizations like PR65851 makes this necessary.
     It also ensures we converge to a stable lattice solution.  */
  if (old_val->lattice_val != UNINITIALIZED)
    ccp_lattice_meet (new_val, old_val);

  gcc_checking_assert (valid_lattice_transition (*old_val, *new_val));

  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
     caller that this was a non-transition.  */
  if (old_val->lattice_val != new_val->lattice_val
      || (new_val->lattice_val == CONSTANT
          && (TREE_CODE (new_val->value) != TREE_CODE (old_val->value)
              || (TREE_CODE (new_val->value) == INTEGER_CST
                  && (new_val->mask != old_val->mask
                      || (wi::bit_and_not (wi::to_widest (old_val->value),
                                           new_val->mask)
                          != wi::bit_and_not (wi::to_widest (new_val->value),
                                              new_val->mask))))
              || (TREE_CODE (new_val->value) != INTEGER_CST
                  && !operand_equal_p (new_val->value, old_val->value, 0)))))
    {
      /* ??? We would like to delay creation of INTEGER_CSTs from
         partially constants here.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          dump_lattice_value (dump_file, "Lattice value changed to ", *new_val);
          fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
        }

      *old_val = *new_val;

      gcc_assert (new_val->lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}
static ccp_prop_value_t get_value_for_expr (tree, bool);
static ccp_prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
void bit_value_binop (enum tree_code, signop, int, widest_int *, widest_int *,
                      signop, int, const widest_int &, const widest_int &,
                      signop, int, const widest_int &, const widest_int &);
/* Return a widest_int that can be used for bitwise simplifications
   from VAL.  */

static widest_int
value_to_wide_int (ccp_prop_value_t val)
{
  if (val.value
      && TREE_CODE (val.value) == INTEGER_CST)
    return wi::to_widest (val.value);

  return 0;
}
/* Return the value for the address expression EXPR based on alignment
   information.  */

static ccp_prop_value_t
get_value_from_alignment (tree expr)
{
  tree type = TREE_TYPE (expr);
  ccp_prop_value_t val;
  unsigned HOST_WIDE_INT bitpos;
  unsigned int align;

  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);

  get_pointer_alignment_1 (expr, &align, &bitpos);
  val.mask = wi::bit_and_not
    (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
     ? wi::mask <widest_int> (TYPE_PRECISION (type), false)
     : -1,
     align / BITS_PER_UNIT - 1);
  val.lattice_val
    = wi::sext (val.mask, TYPE_PRECISION (type)) == -1 ? VARYING : CONSTANT;
  if (val.lattice_val == CONSTANT)
    val.value = build_int_cstu (type, bitpos / BITS_PER_UNIT);
  else
    val.value = NULL_TREE;

  return val;
}
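
/* For instance (illustration only), for &a[1] where a is a 16-byte
   aligned array of 4-byte ints, get_pointer_alignment_1 reports
   align == 128 and bitpos == 32 (both in bits), so the result is
   CONSTANT with value 4 and mask ~15: the low four bits of the address
   are known to be 0b0100, all higher bits are unknown.  */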
/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
   return constant bits extracted from alignment information for
   invariant addresses.  */

static ccp_prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
  ccp_prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      ccp_prop_value_t *val_ = get_value (expr);
      if (val_)
        val = *val_;
      else
        {
          val.lattice_val = VARYING;
          val.value = NULL_TREE;
          val.mask = -1;
        }
      if (for_bits_p
          && val.lattice_val == CONSTANT
          && TREE_CODE (val.value) == ADDR_EXPR)
        val = get_value_from_alignment (val.value);
      /* Fall back to a copy value.  */
      if (!for_bits_p
          && val.lattice_val == VARYING
          && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (expr))
        {
          val.lattice_val = CONSTANT;
          val.value = expr;
          val.mask = -1;
        }
    }
  else if (is_gimple_min_invariant (expr)
           && (!for_bits_p || TREE_CODE (expr) == INTEGER_CST))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      val.mask = 0;
      canonicalize_value (&val);
    }
  else if (TREE_CODE (expr) == ADDR_EXPR)
    val = get_value_from_alignment (expr);
  else
    {
      val.lattice_val = VARYING;
      val.mask = -1;
      val.value = NULL_TREE;
    }

  if (val.lattice_val == VARYING
      && TYPE_UNSIGNED (TREE_TYPE (expr)))
    val.mask = wi::zext (val.mask, TYPE_PRECISION (TREE_TYPE (expr)));

  return val;
}
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT cause its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */
666 likely_value (gimple
*stmt
)
668 bool has_constant_operand
, has_undefined_operand
, all_undefined_operands
;
669 bool has_nsa_operand
;
674 enum gimple_code code
= gimple_code (stmt
);
676 /* This function appears to be called only for assignments, calls,
677 conditionals, and switches, due to the logic in visit_stmt. */
678 gcc_assert (code
== GIMPLE_ASSIGN
679 || code
== GIMPLE_CALL
680 || code
== GIMPLE_COND
681 || code
== GIMPLE_SWITCH
);
683 /* If the statement has volatile operands, it won't fold to a
685 if (gimple_has_volatile_ops (stmt
))
688 /* Arrive here for more complex cases. */
689 has_constant_operand
= false;
690 has_undefined_operand
= false;
691 all_undefined_operands
= true;
692 has_nsa_operand
= false;
693 FOR_EACH_SSA_TREE_OPERAND (use
, stmt
, iter
, SSA_OP_USE
)
695 ccp_prop_value_t
*val
= get_value (use
);
697 if (val
&& val
->lattice_val
== UNDEFINED
)
698 has_undefined_operand
= true;
700 all_undefined_operands
= false;
702 if (val
&& val
->lattice_val
== CONSTANT
)
703 has_constant_operand
= true;
705 if (SSA_NAME_IS_DEFAULT_DEF (use
)
706 || !prop_simulate_again_p (SSA_NAME_DEF_STMT (use
)))
707 has_nsa_operand
= true;
710 /* There may be constants in regular rhs operands. For calls we
711 have to ignore lhs, fndecl and static chain, otherwise only
713 for (i
= (is_gimple_call (stmt
) ? 2 : 0) + gimple_has_lhs (stmt
);
714 i
< gimple_num_ops (stmt
); ++i
)
716 tree op
= gimple_op (stmt
, i
);
717 if (!op
|| TREE_CODE (op
) == SSA_NAME
)
719 if (is_gimple_min_invariant (op
))
720 has_constant_operand
= true;
723 if (has_constant_operand
)
724 all_undefined_operands
= false;
726 if (has_undefined_operand
727 && code
== GIMPLE_CALL
728 && gimple_call_internal_p (stmt
))
729 switch (gimple_call_internal_fn (stmt
))
731 /* These 3 builtins use the first argument just as a magic
732 way how to find out a decl uid. */
733 case IFN_GOMP_SIMD_LANE
:
734 case IFN_GOMP_SIMD_VF
:
735 case IFN_GOMP_SIMD_LAST_LANE
:
736 has_undefined_operand
= false;
742 /* If the operation combines operands like COMPLEX_EXPR make sure to
743 not mark the result UNDEFINED if only one part of the result is
745 if (has_undefined_operand
&& all_undefined_operands
)
747 else if (code
== GIMPLE_ASSIGN
&& has_undefined_operand
)
749 switch (gimple_assign_rhs_code (stmt
))
751 /* Unary operators are handled with all_undefined_operands. */
754 case POINTER_PLUS_EXPR
:
756 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
757 Not bitwise operators, one VARYING operand may specify the
759 Not logical operators for the same reason, apart from XOR.
760 Not COMPLEX_EXPR as one VARYING operand makes the result partly
761 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
762 the undefined operand may be promoted. */
766 /* If any part of an address is UNDEFINED, like the index
767 of an ARRAY_EXPR, then treat the result as UNDEFINED. */
774 /* If there was an UNDEFINED operand but the result may be not UNDEFINED
775 fall back to CONSTANT. During iteration UNDEFINED may still drop
777 if (has_undefined_operand
)
780 /* We do not consider virtual operands here -- load from read-only
781 memory may have only VARYING virtual operands, but still be
782 constant. Also we can combine the stmt with definitions from
783 operands whose definitions are not simulated again. */
784 if (has_constant_operand
786 || gimple_references_memory_p (stmt
))
792 /* Returns true if STMT cannot be constant. */
795 surely_varying_stmt_p (gimple
*stmt
)
797 /* If the statement has operands that we cannot handle, it cannot be
799 if (gimple_has_volatile_ops (stmt
))
802 /* If it is a call and does not return a value or is not a
803 builtin and not an indirect call or a call to function with
804 assume_aligned/alloc_align attribute, it is varying. */
805 if (is_gimple_call (stmt
))
807 tree fndecl
, fntype
= gimple_call_fntype (stmt
);
808 if (!gimple_call_lhs (stmt
)
809 || ((fndecl
= gimple_call_fndecl (stmt
)) != NULL_TREE
810 && !fndecl_built_in_p (fndecl
)
811 && !lookup_attribute ("assume_aligned",
812 TYPE_ATTRIBUTES (fntype
))
813 && !lookup_attribute ("alloc_align",
814 TYPE_ATTRIBUTES (fntype
))))
818 /* Any other store operation is not interesting. */
819 else if (gimple_vdef (stmt
))
822 /* Anything other than assignments and conditional jumps are not
823 interesting for CCP. */
824 if (gimple_code (stmt
) != GIMPLE_ASSIGN
825 && gimple_code (stmt
) != GIMPLE_COND
826 && gimple_code (stmt
) != GIMPLE_SWITCH
827 && gimple_code (stmt
) != GIMPLE_CALL
)
833 /* Initialize local data structures for CCP. */
836 ccp_initialize (void)
840 n_const_val
= num_ssa_names
;
841 const_val
= XCNEWVEC (ccp_prop_value_t
, n_const_val
);
843 /* Initialize simulation flags for PHI nodes and statements. */
844 FOR_EACH_BB_FN (bb
, cfun
)
846 gimple_stmt_iterator i
;
848 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
850 gimple
*stmt
= gsi_stmt (i
);
853 /* If the statement is a control insn, then we do not
854 want to avoid simulating the statement once. Failure
855 to do so means that those edges will never get added. */
856 if (stmt_ends_bb_p (stmt
))
859 is_varying
= surely_varying_stmt_p (stmt
);
866 /* If the statement will not produce a constant, mark
867 all its outputs VARYING. */
868 FOR_EACH_SSA_TREE_OPERAND (def
, stmt
, iter
, SSA_OP_ALL_DEFS
)
869 set_value_varying (def
);
871 prop_set_simulate_again (stmt
, !is_varying
);
875 /* Now process PHI nodes. We never clear the simulate_again flag on
876 phi nodes, since we do not know which edges are executable yet,
877 except for phi nodes for virtual operands when we do not do store ccp. */
878 FOR_EACH_BB_FN (bb
, cfun
)
882 for (i
= gsi_start_phis (bb
); !gsi_end_p (i
); gsi_next (&i
))
884 gphi
*phi
= i
.phi ();
886 if (virtual_operand_p (gimple_phi_result (phi
)))
887 prop_set_simulate_again (phi
, false);
889 prop_set_simulate_again (phi
, true);
894 /* Debug count support. Reset the values of ssa names
895 VARYING when the total number ssa names analyzed is
896 beyond the debug count specified. */
902 for (i
= 0; i
< num_ssa_names
; i
++)
906 const_val
[i
].lattice_val
= VARYING
;
907 const_val
[i
].mask
= -1;
908 const_val
[i
].value
= NULL_TREE
;
914 /* We want to provide our own GET_VALUE and FOLD_STMT virtual methods. */
915 class ccp_folder
: public substitute_and_fold_engine
918 tree
get_value (tree
) FINAL OVERRIDE
;
919 bool fold_stmt (gimple_stmt_iterator
*) FINAL OVERRIDE
;
922 /* This method just wraps GET_CONSTANT_VALUE for now. Over time
923 naked calls to GET_CONSTANT_VALUE should be eliminated in favor
924 of calling member functions. */
927 ccp_folder::get_value (tree op
)
929 return get_constant_value (op
);
932 /* Do final substitution of propagated values, cleanup the flowgraph and
933 free allocated storage. If NONZERO_P, record nonzero bits.
935 Return TRUE when something was optimized. */
938 ccp_finalize (bool nonzero_p
)
940 bool something_changed
;
946 /* Derive alignment and misalignment information from partially
947 constant pointers in the lattice or nonzero bits from partially
948 constant integers. */
949 FOR_EACH_SSA_NAME (i
, name
, cfun
)
951 ccp_prop_value_t
*val
;
952 unsigned int tem
, align
;
954 if (!POINTER_TYPE_P (TREE_TYPE (name
))
955 && (!INTEGRAL_TYPE_P (TREE_TYPE (name
))
956 /* Don't record nonzero bits before IPA to avoid
957 using too much memory. */
961 val
= get_value (name
);
962 if (val
->lattice_val
!= CONSTANT
963 || TREE_CODE (val
->value
) != INTEGER_CST
967 if (POINTER_TYPE_P (TREE_TYPE (name
)))
969 /* Trailing mask bits specify the alignment, trailing value
970 bits the misalignment. */
971 tem
= val
->mask
.to_uhwi ();
972 align
= least_bit_hwi (tem
);
974 set_ptr_info_alignment (get_ptr_info (name
), align
,
975 (TREE_INT_CST_LOW (val
->value
)
980 unsigned int precision
= TYPE_PRECISION (TREE_TYPE (val
->value
));
981 wide_int nonzero_bits
982 = (wide_int::from (val
->mask
, precision
, UNSIGNED
)
983 | wi::to_wide (val
->value
));
984 nonzero_bits
&= get_nonzero_bits (name
);
985 set_nonzero_bits (name
, nonzero_bits
);
989 /* Perform substitutions based on the known constant values. */
990 class ccp_folder ccp_folder
;
991 something_changed
= ccp_folder
.substitute_and_fold ();
995 return something_changed
;
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

                any  M UNDEFINED   = any
                any  M VARYING     = VARYING
                Ci   M Cj          = Ci         if (i == j)
                Ci   M Cj          = VARYING    if (i != j)
   */

static void
ccp_lattice_meet (ccp_prop_value_t *val1, ccp_prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED
      /* For UNDEFINED M SSA we can't always SSA because its definition
         may not dominate the PHI node.  Doing optimistic copy propagation
         also causes a lot of gcc.dg/uninit-pred*.c FAILs.  */
      && (val2->lattice_val != CONSTANT
          || TREE_CODE (val2->value) != SSA_NAME))
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED
           /* See above.  */
           && (val1->lattice_val != CONSTANT
               || TREE_CODE (val1->value) != SSA_NAME))
    {
      /* any M UNDEFINED = any
         Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
           || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = -1;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && TREE_CODE (val1->value) == INTEGER_CST
           && TREE_CODE (val2->value) == INTEGER_CST)
    {
      /* Ci M Cj = Ci           if (i == j)
         Ci M Cj = VARYING      if (i != j)

         For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
         drop to varying.  */
      val1->mask = (val1->mask | val2->mask
                    | (wi::to_widest (val1->value)
                       ^ wi::to_widest (val2->value)));
      if (wi::sext (val1->mask,
                    TYPE_PRECISION (TREE_TYPE (val1->value))) == -1)
        {
          val1->lattice_val = VARYING;
          val1->value = NULL_TREE;
        }
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && operand_equal_p (val1->value, val2->value, 0))
    {
      /* Ci M Cj = Ci           if (i == j)
         Ci M Cj = VARYING      if (i != j)

         VAL1 already contains the value we want for equivalent values.  */
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && (TREE_CODE (val1->value) == ADDR_EXPR
               || TREE_CODE (val2->value) == ADDR_EXPR))
    {
      /* When not equal addresses are involved try meeting for
         alignment.  */
      ccp_prop_value_t tem = *val2;
      if (TREE_CODE (val1->value) == ADDR_EXPR)
        *val1 = get_value_for_expr (val1->value, true);
      if (TREE_CODE (val2->value) == ADDR_EXPR)
        tem = get_value_for_expr (val2->value, true);
      ccp_lattice_meet (val1, &tem);
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = -1;
      val1->value = NULL_TREE;
    }
}
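
/* Worked example for the INTEGER_CST case above (illustration, not from
   the original sources): meeting the constants 12 (0b1100) and 8
   (0b1000), both with mask 0, produces mask 0 | 0 | (12 ^ 8) = 0b0100.
   Only the differing bit becomes unknown, so the result stays CONSTANT
   with known bits "1?00" instead of dropping to VARYING.  */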
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the
   arguments of the PHI node that are incoming via executable edges.  */

enum ssa_prop_result
ccp_propagate::visit_phi (gphi *phi)
{
1099 ccp_prop_value_t new_val
;
1101 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1103 fprintf (dump_file
, "\nVisiting PHI node: ");
1104 print_gimple_stmt (dump_file
, phi
, 0, dump_flags
);
1107 new_val
.lattice_val
= UNDEFINED
;
1108 new_val
.value
= NULL_TREE
;
1112 bool non_exec_edge
= false;
1113 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
1115 /* Compute the meet operator over all the PHI arguments flowing
1116 through executable edges. */
1117 edge e
= gimple_phi_arg_edge (phi
, i
);
1119 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1122 "\n Argument #%d (%d -> %d %sexecutable)\n",
1123 i
, e
->src
->index
, e
->dest
->index
,
1124 (e
->flags
& EDGE_EXECUTABLE
) ? "" : "not ");
1127 /* If the incoming edge is executable, Compute the meet operator for
1128 the existing value of the PHI node and the current PHI argument. */
1129 if (e
->flags
& EDGE_EXECUTABLE
)
1131 tree arg
= gimple_phi_arg (phi
, i
)->def
;
1132 ccp_prop_value_t arg_val
= get_value_for_expr (arg
, false);
1140 ccp_lattice_meet (&new_val
, &arg_val
);
1142 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1144 fprintf (dump_file
, "\t");
1145 print_generic_expr (dump_file
, arg
, dump_flags
);
1146 dump_lattice_value (dump_file
, "\tValue: ", arg_val
);
1147 fprintf (dump_file
, "\n");
1150 if (new_val
.lattice_val
== VARYING
)
1154 non_exec_edge
= true;
1157 /* In case there were non-executable edges and the value is a copy
1158 make sure its definition dominates the PHI node. */
1160 && new_val
.lattice_val
== CONSTANT
1161 && TREE_CODE (new_val
.value
) == SSA_NAME
1162 && ! SSA_NAME_IS_DEFAULT_DEF (new_val
.value
)
1163 && ! dominated_by_p (CDI_DOMINATORS
, gimple_bb (phi
),
1164 gimple_bb (SSA_NAME_DEF_STMT (new_val
.value
))))
1166 new_val
.lattice_val
= VARYING
;
1167 new_val
.value
= NULL_TREE
;
1171 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1173 dump_lattice_value (dump_file
, "\n PHI node value: ", new_val
);
1174 fprintf (dump_file
, "\n\n");
1177 /* Make the transition to the new value. */
1178 if (set_lattice_value (gimple_phi_result (phi
), &new_val
))
1180 if (new_val
.lattice_val
== VARYING
)
1181 return SSA_PROP_VARYING
;
1183 return SSA_PROP_INTERESTING
;
1186 return SSA_PROP_NOT_INTERESTING
;
1189 /* Return the constant value for OP or OP otherwise. */
1192 valueize_op (tree op
)
1194 if (TREE_CODE (op
) == SSA_NAME
)
1196 tree tem
= get_constant_value (op
);
1203 /* Return the constant value for OP, but signal to not follow SSA
1204 edges if the definition may be simulated again. */
1207 valueize_op_1 (tree op
)
1209 if (TREE_CODE (op
) == SSA_NAME
)
1211 /* If the definition may be simulated again we cannot follow
1212 this SSA edge as the SSA propagator does not necessarily
1213 re-visit the use. */
1214 gimple
*def_stmt
= SSA_NAME_DEF_STMT (op
);
1215 if (!gimple_nop_p (def_stmt
)
1216 && prop_simulate_again_p (def_stmt
))
1218 tree tem
= get_constant_value (op
);
/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */
1235 ccp_fold (gimple
*stmt
)
1237 location_t loc
= gimple_location (stmt
);
1238 switch (gimple_code (stmt
))
1242 /* Handle comparison operators that can appear in GIMPLE form. */
1243 tree op0
= valueize_op (gimple_cond_lhs (stmt
));
1244 tree op1
= valueize_op (gimple_cond_rhs (stmt
));
1245 enum tree_code code
= gimple_cond_code (stmt
);
1246 return fold_binary_loc (loc
, code
, boolean_type_node
, op0
, op1
);
1251 /* Return the constant switch index. */
1252 return valueize_op (gimple_switch_index (as_a
<gswitch
*> (stmt
)));
1257 return gimple_fold_stmt_to_constant_1 (stmt
,
1258 valueize_op
, valueize_op_1
);
1265 /* Apply the operation CODE in type TYPE to the value, mask pair
1266 RVAL and RMASK representing a value of type RTYPE and set
1267 the value, mask pair *VAL and *MASK to the result. */
1270 bit_value_unop (enum tree_code code
, signop type_sgn
, int type_precision
,
1271 widest_int
*val
, widest_int
*mask
,
1272 signop rtype_sgn
, int rtype_precision
,
1273 const widest_int
&rval
, const widest_int
&rmask
)
1284 widest_int temv
, temm
;
1285 /* Return ~rval + 1. */
1286 bit_value_unop (BIT_NOT_EXPR
, type_sgn
, type_precision
, &temv
, &temm
,
1287 type_sgn
, type_precision
, rval
, rmask
);
1288 bit_value_binop (PLUS_EXPR
, type_sgn
, type_precision
, val
, mask
,
1289 type_sgn
, type_precision
, temv
, temm
,
1290 type_sgn
, type_precision
, 1, 0);
1296 /* First extend mask and value according to the original type. */
1297 *mask
= wi::ext (rmask
, rtype_precision
, rtype_sgn
);
1298 *val
= wi::ext (rval
, rtype_precision
, rtype_sgn
);
1300 /* Then extend mask and value according to the target type. */
1301 *mask
= wi::ext (*mask
, type_precision
, type_sgn
);
1302 *val
= wi::ext (*val
, type_precision
, type_sgn
);
1312 /* Apply the operation CODE in type TYPE to the value, mask pairs
1313 R1VAL, R1MASK and R2VAL, R2MASK representing a values of type R1TYPE
1314 and R2TYPE and set the value, mask pair *VAL and *MASK to the result. */
1317 bit_value_binop (enum tree_code code
, signop sgn
, int width
,
1318 widest_int
*val
, widest_int
*mask
,
1319 signop r1type_sgn
, int r1type_precision
,
1320 const widest_int
&r1val
, const widest_int
&r1mask
,
1321 signop r2type_sgn
, int r2type_precision
,
1322 const widest_int
&r2val
, const widest_int
&r2mask
)
1324 bool swap_p
= false;
1326 /* Assume we'll get a constant result. Use an initial non varying
1327 value, we fall back to varying in the end if necessary. */
1333 /* The mask is constant where there is a known not
1334 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2)) */
1335 *mask
= (r1mask
| r2mask
) & (r1val
| r1mask
) & (r2val
| r2mask
);
1336 *val
= r1val
& r2val
;
1340 /* The mask is constant where there is a known
1341 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)). */
1342 *mask
= wi::bit_and_not (r1mask
| r2mask
,
1343 wi::bit_and_not (r1val
, r1mask
)
1344 | wi::bit_and_not (r2val
, r2mask
));
1345 *val
= r1val
| r2val
;
1350 *mask
= r1mask
| r2mask
;
1351 *val
= r1val
^ r2val
;
1358 widest_int shift
= r2val
;
1366 if (wi::neg_p (shift
))
1369 if (code
== RROTATE_EXPR
)
1370 code
= LROTATE_EXPR
;
1372 code
= RROTATE_EXPR
;
1374 if (code
== RROTATE_EXPR
)
1376 *mask
= wi::rrotate (r1mask
, shift
, width
);
1377 *val
= wi::rrotate (r1val
, shift
, width
);
1381 *mask
= wi::lrotate (r1mask
, shift
, width
);
1382 *val
= wi::lrotate (r1val
, shift
, width
);
1390 /* ??? We can handle partially known shift counts if we know
1391 its sign. That way we can tell that (x << (y | 8)) & 255
1395 widest_int shift
= r2val
;
1403 if (wi::neg_p (shift
))
1406 if (code
== RSHIFT_EXPR
)
1411 if (code
== RSHIFT_EXPR
)
1413 *mask
= wi::rshift (wi::ext (r1mask
, width
, sgn
), shift
, sgn
);
1414 *val
= wi::rshift (wi::ext (r1val
, width
, sgn
), shift
, sgn
);
1418 *mask
= wi::ext (r1mask
<< shift
, width
, sgn
);
1419 *val
= wi::ext (r1val
<< shift
, width
, sgn
);
1426 case POINTER_PLUS_EXPR
:
1428 /* Do the addition with unknown bits set to zero, to give carry-ins of
1429 zero wherever possible. */
1430 widest_int lo
= (wi::bit_and_not (r1val
, r1mask
)
1431 + wi::bit_and_not (r2val
, r2mask
));
1432 lo
= wi::ext (lo
, width
, sgn
);
1433 /* Do the addition with unknown bits set to one, to give carry-ins of
1434 one wherever possible. */
1435 widest_int hi
= (r1val
| r1mask
) + (r2val
| r2mask
);
1436 hi
= wi::ext (hi
, width
, sgn
);
1437 /* Each bit in the result is known if (a) the corresponding bits in
1438 both inputs are known, and (b) the carry-in to that bit position
1439 is known. We can check condition (b) by seeing if we got the same
1440 result with minimised carries as with maximised carries. */
1441 *mask
= r1mask
| r2mask
| (lo
^ hi
);
1442 *mask
= wi::ext (*mask
, width
, sgn
);
1443 /* It shouldn't matter whether we choose lo or hi here. */
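        /* Worked example (illustration, not from the original sources):
           adding {val = 0b1000, mask = 0b0011} to the constant 0b0100
           gives lo = 0b1100 and hi = 0b1011 + 0b0100 = 0b1111, so
           lo ^ hi = 0b0011 and the final mask is 0b0011 | 0 | 0b0011
           = 0b0011: the sum is known to have the form 0b11??.  */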
1450 widest_int temv
, temm
;
1451 bit_value_unop (NEGATE_EXPR
, r2type_sgn
, r2type_precision
, &temv
, &temm
,
1452 r2type_sgn
, r2type_precision
, r2val
, r2mask
);
1453 bit_value_binop (PLUS_EXPR
, sgn
, width
, val
, mask
,
1454 r1type_sgn
, r1type_precision
, r1val
, r1mask
,
1455 r2type_sgn
, r2type_precision
, temv
, temm
);
1461 /* Just track trailing zeros in both operands and transfer
1462 them to the other. */
1463 int r1tz
= wi::ctz (r1val
| r1mask
);
1464 int r2tz
= wi::ctz (r2val
| r2mask
);
1465 if (r1tz
+ r2tz
>= width
)
1470 else if (r1tz
+ r2tz
> 0)
1472 *mask
= wi::ext (wi::mask
<widest_int
> (r1tz
+ r2tz
, true),
1482 widest_int m
= r1mask
| r2mask
;
1483 if (wi::bit_and_not (r1val
, m
) != wi::bit_and_not (r2val
, m
))
1486 *val
= ((code
== EQ_EXPR
) ? 0 : 1);
1490 /* We know the result of a comparison is always one or zero. */
1500 code
= swap_tree_comparison (code
);
1507 const widest_int
&o1val
= swap_p
? r2val
: r1val
;
1508 const widest_int
&o1mask
= swap_p
? r2mask
: r1mask
;
1509 const widest_int
&o2val
= swap_p
? r1val
: r2val
;
1510 const widest_int
&o2mask
= swap_p
? r1mask
: r2mask
;
1512 /* If the most significant bits are not known we know nothing. */
1513 if (wi::neg_p (o1mask
) || wi::neg_p (o2mask
))
1516 /* For comparisons the signedness is in the comparison operands. */
1519 /* If we know the most significant bits we know the values
1520 value ranges by means of treating varying bits as zero
1521 or one. Do a cross comparison of the max/min pairs. */
1522 maxmin
= wi::cmp (o1val
| o1mask
,
1523 wi::bit_and_not (o2val
, o2mask
), sgn
);
1524 minmax
= wi::cmp (wi::bit_and_not (o1val
, o1mask
),
1525 o2val
| o2mask
, sgn
);
1526 if (maxmin
< 0) /* o1 is less than o2. */
1531 else if (minmax
> 0) /* o1 is not less or equal to o2. */
1536 else if (maxmin
== minmax
) /* o1 and o2 are equal. */
1538 /* This probably should never happen as we'd have
1539 folded the thing during fully constant value folding. */
1541 *val
= (code
== LE_EXPR
? 1 : 0);
1545 /* We know the result of a comparison is always one or zero. */
1556 /* Return the propagation value when applying the operation CODE to
1557 the value RHS yielding type TYPE. */
1559 static ccp_prop_value_t
1560 bit_value_unop (enum tree_code code
, tree type
, tree rhs
)
1562 ccp_prop_value_t rval
= get_value_for_expr (rhs
, true);
1563 widest_int value
, mask
;
1564 ccp_prop_value_t val
;
1566 if (rval
.lattice_val
== UNDEFINED
)
1569 gcc_assert ((rval
.lattice_val
== CONSTANT
1570 && TREE_CODE (rval
.value
) == INTEGER_CST
)
1571 || wi::sext (rval
.mask
, TYPE_PRECISION (TREE_TYPE (rhs
))) == -1);
1572 bit_value_unop (code
, TYPE_SIGN (type
), TYPE_PRECISION (type
), &value
, &mask
,
1573 TYPE_SIGN (TREE_TYPE (rhs
)), TYPE_PRECISION (TREE_TYPE (rhs
)),
1574 value_to_wide_int (rval
), rval
.mask
);
1575 if (wi::sext (mask
, TYPE_PRECISION (type
)) != -1)
1577 val
.lattice_val
= CONSTANT
;
1579 /* ??? Delay building trees here. */
1580 val
.value
= wide_int_to_tree (type
, value
);
1584 val
.lattice_val
= VARYING
;
1585 val
.value
= NULL_TREE
;
1591 /* Return the propagation value when applying the operation CODE to
1592 the values RHS1 and RHS2 yielding type TYPE. */
1594 static ccp_prop_value_t
1595 bit_value_binop (enum tree_code code
, tree type
, tree rhs1
, tree rhs2
)
1597 ccp_prop_value_t r1val
= get_value_for_expr (rhs1
, true);
1598 ccp_prop_value_t r2val
= get_value_for_expr (rhs2
, true);
1599 widest_int value
, mask
;
1600 ccp_prop_value_t val
;
1602 if (r1val
.lattice_val
== UNDEFINED
1603 || r2val
.lattice_val
== UNDEFINED
)
1605 val
.lattice_val
= VARYING
;
1606 val
.value
= NULL_TREE
;
1611 gcc_assert ((r1val
.lattice_val
== CONSTANT
1612 && TREE_CODE (r1val
.value
) == INTEGER_CST
)
1613 || wi::sext (r1val
.mask
,
1614 TYPE_PRECISION (TREE_TYPE (rhs1
))) == -1);
1615 gcc_assert ((r2val
.lattice_val
== CONSTANT
1616 && TREE_CODE (r2val
.value
) == INTEGER_CST
)
1617 || wi::sext (r2val
.mask
,
1618 TYPE_PRECISION (TREE_TYPE (rhs2
))) == -1);
1619 bit_value_binop (code
, TYPE_SIGN (type
), TYPE_PRECISION (type
), &value
, &mask
,
1620 TYPE_SIGN (TREE_TYPE (rhs1
)), TYPE_PRECISION (TREE_TYPE (rhs1
)),
1621 value_to_wide_int (r1val
), r1val
.mask
,
1622 TYPE_SIGN (TREE_TYPE (rhs2
)), TYPE_PRECISION (TREE_TYPE (rhs2
)),
1623 value_to_wide_int (r2val
), r2val
.mask
);
1625 if (wi::sext (mask
, TYPE_PRECISION (type
)) != -1)
1627 val
.lattice_val
= CONSTANT
;
1629 /* ??? Delay building trees here. */
1630 val
.value
= wide_int_to_tree (type
, value
);
1634 val
.lattice_val
= VARYING
;
1635 val
.value
= NULL_TREE
;
/* Return the propagation value for __builtin_assume_aligned
   and functions with assume_aligned or alloc_aligned attribute.
   For __builtin_assume_aligned, ATTR is NULL_TREE,
   for assume_aligned attribute ATTR is non-NULL and ALLOC_ALIGNED
   is false, for alloc_aligned attribute ATTR is non-NULL and
   ALLOC_ALIGNED is true.  */
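
/* Illustration (not part of the original comment): after

     p_2 = __builtin_assume_aligned (p_1, 16);

   the lattice value computed here for p_2 has its low four bits known
   to be zero, regardless of what is known about p_1.  */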
1648 static ccp_prop_value_t
1649 bit_value_assume_aligned (gimple
*stmt
, tree attr
, ccp_prop_value_t ptrval
,
1652 tree align
, misalign
= NULL_TREE
, type
;
1653 unsigned HOST_WIDE_INT aligni
, misaligni
= 0;
1654 ccp_prop_value_t alignval
;
1655 widest_int value
, mask
;
1656 ccp_prop_value_t val
;
1658 if (attr
== NULL_TREE
)
1660 tree ptr
= gimple_call_arg (stmt
, 0);
1661 type
= TREE_TYPE (ptr
);
1662 ptrval
= get_value_for_expr (ptr
, true);
1666 tree lhs
= gimple_call_lhs (stmt
);
1667 type
= TREE_TYPE (lhs
);
1670 if (ptrval
.lattice_val
== UNDEFINED
)
1672 gcc_assert ((ptrval
.lattice_val
== CONSTANT
1673 && TREE_CODE (ptrval
.value
) == INTEGER_CST
)
1674 || wi::sext (ptrval
.mask
, TYPE_PRECISION (type
)) == -1);
1675 if (attr
== NULL_TREE
)
1677 /* Get aligni and misaligni from __builtin_assume_aligned. */
1678 align
= gimple_call_arg (stmt
, 1);
1679 if (!tree_fits_uhwi_p (align
))
1681 aligni
= tree_to_uhwi (align
);
1682 if (gimple_call_num_args (stmt
) > 2)
1684 misalign
= gimple_call_arg (stmt
, 2);
1685 if (!tree_fits_uhwi_p (misalign
))
1687 misaligni
= tree_to_uhwi (misalign
);
1692 /* Get aligni and misaligni from assume_aligned or
1693 alloc_align attributes. */
1694 if (TREE_VALUE (attr
) == NULL_TREE
)
1696 attr
= TREE_VALUE (attr
);
1697 align
= TREE_VALUE (attr
);
1698 if (!tree_fits_uhwi_p (align
))
1700 aligni
= tree_to_uhwi (align
);
1703 if (aligni
== 0 || aligni
> gimple_call_num_args (stmt
))
1705 align
= gimple_call_arg (stmt
, aligni
- 1);
1706 if (!tree_fits_uhwi_p (align
))
1708 aligni
= tree_to_uhwi (align
);
1710 else if (TREE_CHAIN (attr
) && TREE_VALUE (TREE_CHAIN (attr
)))
1712 misalign
= TREE_VALUE (TREE_CHAIN (attr
));
1713 if (!tree_fits_uhwi_p (misalign
))
1715 misaligni
= tree_to_uhwi (misalign
);
1718 if (aligni
<= 1 || (aligni
& (aligni
- 1)) != 0 || misaligni
>= aligni
)
1721 align
= build_int_cst_type (type
, -aligni
);
1722 alignval
= get_value_for_expr (align
, true);
1723 bit_value_binop (BIT_AND_EXPR
, TYPE_SIGN (type
), TYPE_PRECISION (type
), &value
, &mask
,
1724 TYPE_SIGN (type
), TYPE_PRECISION (type
), value_to_wide_int (ptrval
), ptrval
.mask
,
1725 TYPE_SIGN (type
), TYPE_PRECISION (type
), value_to_wide_int (alignval
), alignval
.mask
);
1727 if (wi::sext (mask
, TYPE_PRECISION (type
)) != -1)
1729 val
.lattice_val
= CONSTANT
;
1731 gcc_assert ((mask
.to_uhwi () & (aligni
- 1)) == 0);
1732 gcc_assert ((value
.to_uhwi () & (aligni
- 1)) == 0);
1734 /* ??? Delay building trees here. */
1735 val
.value
= wide_int_to_tree (type
, value
);
1739 val
.lattice_val
= VARYING
;
1740 val
.value
= NULL_TREE
;
1746 /* Evaluate statement STMT.
1747 Valid only for assignments, calls, conditionals, and switches. */
1749 static ccp_prop_value_t
1750 evaluate_stmt (gimple
*stmt
)
1752 ccp_prop_value_t val
;
1753 tree simplified
= NULL_TREE
;
1754 ccp_lattice_t likelyvalue
= likely_value (stmt
);
1755 bool is_constant
= false;
1758 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1760 fprintf (dump_file
, "which is likely ");
1761 switch (likelyvalue
)
1764 fprintf (dump_file
, "CONSTANT");
1767 fprintf (dump_file
, "UNDEFINED");
1770 fprintf (dump_file
, "VARYING");
1774 fprintf (dump_file
, "\n");
1777 /* If the statement is likely to have a CONSTANT result, then try
1778 to fold the statement to determine the constant value. */
1779 /* FIXME. This is the only place that we call ccp_fold.
1780 Since likely_value never returns CONSTANT for calls, we will
1781 not attempt to fold them, including builtins that may profit. */
1782 if (likelyvalue
== CONSTANT
)
1784 fold_defer_overflow_warnings ();
1785 simplified
= ccp_fold (stmt
);
1787 && TREE_CODE (simplified
) == SSA_NAME
)
1789 /* We may not use values of something that may be simulated again,
1790 see valueize_op_1. */
1791 if (SSA_NAME_IS_DEFAULT_DEF (simplified
)
1792 || ! prop_simulate_again_p (SSA_NAME_DEF_STMT (simplified
)))
1794 ccp_prop_value_t
*val
= get_value (simplified
);
1795 if (val
&& val
->lattice_val
!= VARYING
)
1797 fold_undefer_overflow_warnings (true, stmt
, 0);
1802 /* We may also not place a non-valueized copy in the lattice
1803 as that might become stale if we never re-visit this stmt. */
1804 simplified
= NULL_TREE
;
1806 is_constant
= simplified
&& is_gimple_min_invariant (simplified
);
1807 fold_undefer_overflow_warnings (is_constant
, stmt
, 0);
1810 /* The statement produced a constant value. */
1811 val
.lattice_val
= CONSTANT
;
1812 val
.value
= simplified
;
1817 /* If the statement is likely to have a VARYING result, then do not
1818 bother folding the statement. */
1819 else if (likelyvalue
== VARYING
)
1821 enum gimple_code code
= gimple_code (stmt
);
1822 if (code
== GIMPLE_ASSIGN
)
1824 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
1826 /* Other cases cannot satisfy is_gimple_min_invariant
1828 if (get_gimple_rhs_class (subcode
) == GIMPLE_SINGLE_RHS
)
1829 simplified
= gimple_assign_rhs1 (stmt
);
1831 else if (code
== GIMPLE_SWITCH
)
1832 simplified
= gimple_switch_index (as_a
<gswitch
*> (stmt
));
1834 /* These cannot satisfy is_gimple_min_invariant without folding. */
1835 gcc_assert (code
== GIMPLE_CALL
|| code
== GIMPLE_COND
);
1836 is_constant
= simplified
&& is_gimple_min_invariant (simplified
);
1839 /* The statement produced a constant value. */
1840 val
.lattice_val
= CONSTANT
;
1841 val
.value
= simplified
;
1845 /* If the statement result is likely UNDEFINED, make it so. */
1846 else if (likelyvalue
== UNDEFINED
)
1848 val
.lattice_val
= UNDEFINED
;
1849 val
.value
= NULL_TREE
;
1854 /* Resort to simplification for bitwise tracking. */
1855 if (flag_tree_bit_ccp
1856 && (likelyvalue
== CONSTANT
|| is_gimple_call (stmt
)
1857 || (gimple_assign_single_p (stmt
)
1858 && gimple_assign_rhs_code (stmt
) == ADDR_EXPR
))
1861 enum gimple_code code
= gimple_code (stmt
);
1862 val
.lattice_val
= VARYING
;
1863 val
.value
= NULL_TREE
;
1865 if (code
== GIMPLE_ASSIGN
)
1867 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
1868 tree rhs1
= gimple_assign_rhs1 (stmt
);
1869 tree lhs
= gimple_assign_lhs (stmt
);
1870 if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs
))
1871 || POINTER_TYPE_P (TREE_TYPE (lhs
)))
1872 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1
))
1873 || POINTER_TYPE_P (TREE_TYPE (rhs1
))))
1874 switch (get_gimple_rhs_class (subcode
))
1876 case GIMPLE_SINGLE_RHS
:
1877 val
= get_value_for_expr (rhs1
, true);
1880 case GIMPLE_UNARY_RHS
:
1881 val
= bit_value_unop (subcode
, TREE_TYPE (lhs
), rhs1
);
1884 case GIMPLE_BINARY_RHS
:
1885 val
= bit_value_binop (subcode
, TREE_TYPE (lhs
), rhs1
,
1886 gimple_assign_rhs2 (stmt
));
1892 else if (code
== GIMPLE_COND
)
1894 enum tree_code code
= gimple_cond_code (stmt
);
1895 tree rhs1
= gimple_cond_lhs (stmt
);
1896 tree rhs2
= gimple_cond_rhs (stmt
);
1897 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1
))
1898 || POINTER_TYPE_P (TREE_TYPE (rhs1
)))
1899 val
= bit_value_binop (code
, TREE_TYPE (rhs1
), rhs1
, rhs2
);
1901 else if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
1903 tree fndecl
= gimple_call_fndecl (stmt
);
1904 switch (DECL_FUNCTION_CODE (fndecl
))
1906 case BUILT_IN_MALLOC
:
1907 case BUILT_IN_REALLOC
:
1908 case BUILT_IN_CALLOC
:
1909 case BUILT_IN_STRDUP
:
1910 case BUILT_IN_STRNDUP
:
1911 val
.lattice_val
= CONSTANT
;
1912 val
.value
= build_int_cst (TREE_TYPE (gimple_get_lhs (stmt
)), 0);
1913 val
.mask
= ~((HOST_WIDE_INT
) MALLOC_ABI_ALIGNMENT
1914 / BITS_PER_UNIT
- 1);
1917 CASE_BUILT_IN_ALLOCA
:
1918 align
= (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_ALLOCA
1920 : TREE_INT_CST_LOW (gimple_call_arg (stmt
, 1)));
1921 val
.lattice_val
= CONSTANT
;
1922 val
.value
= build_int_cst (TREE_TYPE (gimple_get_lhs (stmt
)), 0);
1923 val
.mask
= ~((HOST_WIDE_INT
) align
/ BITS_PER_UNIT
- 1);
1926 /* These builtins return their first argument, unmodified. */
1927 case BUILT_IN_MEMCPY
:
1928 case BUILT_IN_MEMMOVE
:
1929 case BUILT_IN_MEMSET
:
1930 case BUILT_IN_STRCPY
:
1931 case BUILT_IN_STRNCPY
:
1932 case BUILT_IN_MEMCPY_CHK
:
1933 case BUILT_IN_MEMMOVE_CHK
:
1934 case BUILT_IN_MEMSET_CHK
:
1935 case BUILT_IN_STRCPY_CHK
:
1936 case BUILT_IN_STRNCPY_CHK
:
1937 val
= get_value_for_expr (gimple_call_arg (stmt
, 0), true);
1940 case BUILT_IN_ASSUME_ALIGNED
:
1941 val
= bit_value_assume_aligned (stmt
, NULL_TREE
, val
, false);
1944 case BUILT_IN_ALIGNED_ALLOC
:
1946 tree align
= get_constant_value (gimple_call_arg (stmt
, 0));
1948 && tree_fits_uhwi_p (align
))
1950 unsigned HOST_WIDE_INT aligni
= tree_to_uhwi (align
);
1952 /* align must be power-of-two */
1953 && (aligni
& (aligni
- 1)) == 0)
1955 val
.lattice_val
= CONSTANT
;
1956 val
.value
= build_int_cst (ptr_type_node
, 0);
1966 if (is_gimple_call (stmt
) && gimple_call_lhs (stmt
))
1968 tree fntype
= gimple_call_fntype (stmt
);
1971 tree attrs
= lookup_attribute ("assume_aligned",
1972 TYPE_ATTRIBUTES (fntype
));
1974 val
= bit_value_assume_aligned (stmt
, attrs
, val
, false);
1975 attrs
= lookup_attribute ("alloc_align",
1976 TYPE_ATTRIBUTES (fntype
));
1978 val
= bit_value_assume_aligned (stmt
, attrs
, val
, true);
1981 is_constant
= (val
.lattice_val
== CONSTANT
);
1984 if (flag_tree_bit_ccp
1985 && ((is_constant
&& TREE_CODE (val
.value
) == INTEGER_CST
)
1987 && gimple_get_lhs (stmt
)
1988 && TREE_CODE (gimple_get_lhs (stmt
)) == SSA_NAME
)
1990 tree lhs
= gimple_get_lhs (stmt
);
1991 wide_int nonzero_bits
= get_nonzero_bits (lhs
);
1992 if (nonzero_bits
!= -1)
1996 val
.lattice_val
= CONSTANT
;
1997 val
.value
= build_zero_cst (TREE_TYPE (lhs
));
1998 val
.mask
= extend_mask (nonzero_bits
, TYPE_SIGN (TREE_TYPE (lhs
)));
2003 if (wi::bit_and_not (wi::to_wide (val
.value
), nonzero_bits
) != 0)
2004 val
.value
= wide_int_to_tree (TREE_TYPE (lhs
),
2006 & wi::to_wide (val
.value
));
2007 if (nonzero_bits
== 0)
2010 val
.mask
= val
.mask
& extend_mask (nonzero_bits
,
2011 TYPE_SIGN (TREE_TYPE (lhs
)));
2016 /* The statement produced a nonconstant value. */
2019 /* The statement produced a copy. */
2020 if (simplified
&& TREE_CODE (simplified
) == SSA_NAME
2021 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (simplified
))
2023 val
.lattice_val
= CONSTANT
;
2024 val
.value
= simplified
;
2027 /* The statement is VARYING. */
2030 val
.lattice_val
= VARYING
;
2031 val
.value
= NULL_TREE
;
2039 typedef hash_table
<nofree_ptr_hash
<gimple
> > gimple_htab
;
2041 /* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
2042 each matching BUILT_IN_STACK_RESTORE. Mark visited phis in VISITED. */
2045 insert_clobber_before_stack_restore (tree saved_val
, tree var
,
2046 gimple_htab
**visited
)
2049 gassign
*clobber_stmt
;
2051 imm_use_iterator iter
;
2052 gimple_stmt_iterator i
;
2055 FOR_EACH_IMM_USE_STMT (stmt
, iter
, saved_val
)
2056 if (gimple_call_builtin_p (stmt
, BUILT_IN_STACK_RESTORE
))
2058 clobber
= build_constructor (TREE_TYPE (var
),
2060 TREE_THIS_VOLATILE (clobber
) = 1;
2061 clobber_stmt
= gimple_build_assign (var
, clobber
);
2063 i
= gsi_for_stmt (stmt
);
2064 gsi_insert_before (&i
, clobber_stmt
, GSI_SAME_STMT
);
2066 else if (gimple_code (stmt
) == GIMPLE_PHI
)
2069 *visited
= new gimple_htab (10);
2071 slot
= (*visited
)->find_slot (stmt
, INSERT
);
2076 insert_clobber_before_stack_restore (gimple_phi_result (stmt
), var
,
2079 else if (gimple_assign_ssa_name_copy_p (stmt
))
2080 insert_clobber_before_stack_restore (gimple_assign_lhs (stmt
), var
,
2083 gcc_assert (is_gimple_debug (stmt
));
2086 /* Advance the iterator to the previous non-debug gimple statement in the same
2087 or dominating basic block. */
2090 gsi_prev_dom_bb_nondebug (gimple_stmt_iterator
*i
)
2094 gsi_prev_nondebug (i
);
2095 while (gsi_end_p (*i
))
2097 dom
= get_immediate_dominator (CDI_DOMINATORS
, i
->bb
);
2098 if (dom
== NULL
|| dom
== ENTRY_BLOCK_PTR_FOR_FN (cfun
))
2101 *i
= gsi_last_bb (dom
);
2105 /* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
2106 a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.
2108 It is possible that BUILT_IN_STACK_SAVE cannot be find in a dominator when a
2109 previous pass (such as DOM) duplicated it along multiple paths to a BB. In
2110 that case the function gives up without inserting the clobbers. */
2113 insert_clobbers_for_var (gimple_stmt_iterator i
, tree var
)
2117 gimple_htab
*visited
= NULL
;
2119 for (; !gsi_end_p (i
); gsi_prev_dom_bb_nondebug (&i
))
2121 stmt
= gsi_stmt (i
);
2123 if (!gimple_call_builtin_p (stmt
, BUILT_IN_STACK_SAVE
))
2126 saved_val
= gimple_call_lhs (stmt
);
2127 if (saved_val
== NULL_TREE
)
2130 insert_clobber_before_stack_restore (saved_val
, var
, &visited
);
/* Detects a __builtin_alloca_with_align with constant size argument.  Declares
   fixed-size array and returns the address, if found, otherwise returns
   NULL_TREE.  */

static tree
fold_builtin_alloca_with_align (gimple *stmt)
{
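  /* Illustration (not from the original sources; D.1234 is a made-up
     name): once the size argument has been propagated to the constant
     32, a call like

       p_1 = __builtin_alloca_with_align (32, 128);

     is folded into the address of a 32-byte local array D.1234 whose
     DECL_ALIGN is 128 bits, i.e. p_1 = &D.1234, as long as 32 does not
     exceed the PARAM_LARGE_STACK_FRAME limit checked below.  */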
2144 unsigned HOST_WIDE_INT size
, threshold
, n_elem
;
2145 tree lhs
, arg
, block
, var
, elem_type
, array_type
;
2148 lhs
= gimple_call_lhs (stmt
);
2149 if (lhs
== NULL_TREE
)
2152 /* Detect constant argument. */
2153 arg
= get_constant_value (gimple_call_arg (stmt
, 0));
2154 if (arg
== NULL_TREE
2155 || TREE_CODE (arg
) != INTEGER_CST
2156 || !tree_fits_uhwi_p (arg
))
2159 size
= tree_to_uhwi (arg
);
2161 /* Heuristic: don't fold large allocas. */
2162 threshold
= (unsigned HOST_WIDE_INT
)PARAM_VALUE (PARAM_LARGE_STACK_FRAME
);
2163 /* In case the alloca is located at function entry, it has the same lifetime
2164 as a declared array, so we allow a larger size. */
2165 block
= gimple_block (stmt
);
2166 if (!(cfun
->after_inlining
2168 && TREE_CODE (BLOCK_SUPERCONTEXT (block
)) == FUNCTION_DECL
))
2170 if (size
> threshold
)
2173 /* Declare array. */
2174 elem_type
= build_nonstandard_integer_type (BITS_PER_UNIT
, 1);
2175 n_elem
= size
* 8 / BITS_PER_UNIT
;
2176 array_type
= build_array_type_nelts (elem_type
, n_elem
);
2177 var
= create_tmp_var (array_type
);
2178 SET_DECL_ALIGN (var
, TREE_INT_CST_LOW (gimple_call_arg (stmt
, 1)));
2180 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (lhs
);
2181 if (pi
!= NULL
&& !pi
->pt
.anything
)
2185 singleton_p
= pt_solution_singleton_or_null_p (&pi
->pt
, &uid
);
2186 gcc_assert (singleton_p
);
2187 SET_DECL_PT_UID (var
, uid
);
2191 /* Fold alloca to the address of the array. */
2192 return fold_convert (TREE_TYPE (lhs
), build_fold_addr_expr (var
));
/* Fold the stmt at *GSI with CCP specific information that propagating
   and regular folding does not catch.  */

bool
ccp_folder::fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	ccp_prop_value_t val;
	/* Statement evaluation will handle type mismatches in constants
	   more gracefully than the final propagation.  This allows us to
	   fold more conditionals here.  */
	val = evaluate_stmt (stmt);
	if (val.lattice_val != CONSTANT
	    || TREE_CODE (val.value) != INTEGER_CST)
	  return false;

	if (dump_file)
	  {
	    fprintf (dump_file, "Folding predicate ");
	    print_gimple_expr (dump_file, stmt, 0);
	    fprintf (dump_file, " to ");
	    print_generic_expr (dump_file, val.value);
	    fprintf (dump_file, "\n");
	  }

	if (integer_zerop (val.value))
	  gimple_cond_make_false (cond_stmt);
	else
	  gimple_cond_make_true (cond_stmt);

	return true;
      }

    case GIMPLE_CALL:
      {
	tree lhs = gimple_call_lhs (stmt);
	int flags = gimple_call_flags (stmt);
	tree val;
	tree argt;
	bool changed = false;
	unsigned i;

	/* If the call was folded into a constant make sure it goes
	   away even if we cannot propagate into all uses because of
	   type issues.  */
	if (lhs
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs))
	    /* Don't optimize away calls that have side-effects.  */
	    && (flags & (ECF_CONST|ECF_PURE)) != 0
	    && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
	  {
	    tree new_rhs = unshare_expr (val);
	    bool res;
	    if (!useless_type_conversion_p (TREE_TYPE (lhs),
					    TREE_TYPE (new_rhs)))
	      new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	    res = update_call_from_tree (gsi, new_rhs);
	    gcc_assert (res);
	    return true;
	  }

	/* Internal calls provide no argument types, so the extra laxity
	   for normal calls does not apply.  */
	if (gimple_call_internal_p (stmt))
	  return false;

	/* The heuristic of fold_builtin_alloca_with_align differs before and
	   after inlining, so we don't require the arg to be changed into a
	   constant for folding, but just to be constant.  */
	if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN)
	    || gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX))
	  {
	    tree new_rhs = fold_builtin_alloca_with_align (stmt);
	    if (new_rhs)
	      {
		bool res = update_call_from_tree (gsi, new_rhs);
		tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 0);
		gcc_assert (res);
		insert_clobbers_for_var (*gsi, var);
		return true;
	      }
	  }

	/* Propagate into the call arguments.  Compared to replace_uses_in
	   this can use the argument slot types for type verification
	   instead of the current argument type.  We also can safely
	   drop qualifiers here as we are dealing with constants anyway.  */
	argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
	for (i = 0; i < gimple_call_num_args (stmt) && argt;
	     ++i, argt = TREE_CHAIN (argt))
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (TREE_CODE (arg) == SSA_NAME
		&& (val = get_constant_value (arg))
		&& useless_type_conversion_p
		     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
		      TYPE_MAIN_VARIANT (TREE_TYPE (val))))
	      {
		gimple_call_set_arg (stmt, i, unshare_expr (val));
		changed = true;
	      }
	  }

	return changed;
      }

    case GIMPLE_ASSIGN:
      {
	tree lhs = gimple_assign_lhs (stmt);
	tree val;

	/* If we have a load that turned out to be constant replace it
	   as we cannot propagate into all uses in all cases.  */
	if (gimple_assign_single_p (stmt)
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs)))
	  {
	    tree rhs = unshare_expr (val);
	    if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	      rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
	    gimple_assign_set_rhs_from_tree (gsi, rhs);
	    return true;
	  }

	return false;
      }

    default:
      return false;
    }
}
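
/* Example of what fold_stmt catches (hypothetical SSA names): if the lattice
   says x_3 is the constant 0, the conditional

     if (x_3 != 0) goto <bb 4>; else goto <bb 5>;

   is rewritten via gimple_cond_make_false so only the else edge survives,
   and a load whose SSA lhs was discovered to be constant gets its RHS
   replaced by that constant outright.  */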
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple *stmt, tree *output_p)
{
  ccp_prop_value_t val;
  enum ssa_prop_result retval = SSA_PROP_NOT_INTERESTING;

  tree lhs = gimple_get_lhs (stmt);
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* Evaluate the statement, which could be
	 either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
      val = evaluate_stmt (stmt);

      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, &val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}
/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple *stmt, edge *taken_edge_p)
{
  ccp_prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);
  if (val.lattice_val != CONSTANT
      || TREE_CODE (val.value) != INTEGER_CST)
    return SSA_PROP_VARYING;

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = find_taken_edge (block, val.value);
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
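
/* Example (hypothetical): for "if (i_2 > 10)" with i_2 known to be CONSTANT
   42, evaluate_stmt yields a constant true predicate and find_taken_edge
   returns the true edge, so only that edge is handed to the propagation
   engine.  */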
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

enum ssa_prop_result
ccp_propagate::visit_stmt (gimple *stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
	 output value, evaluate its RHS to see if the lattice value of
	 its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
	return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
	 which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
	 computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    set_value_varying (def);

  return SSA_PROP_VARYING;
}
/* Main entry point for SSA Conditional Constant Propagation.  If NONZERO_P,
   record nonzero bits.  */

static unsigned int
do_ssa_ccp (bool nonzero_p)
{
  unsigned int todo = 0;
  calculate_dominance_info (CDI_DOMINATORS);

  ccp_initialize ();
  class ccp_propagate ccp_propagate;
  ccp_propagate.ssa_propagate ();
  if (ccp_finalize (nonzero_p || flag_ipa_bit_cp))
    {
      todo = (TODO_cleanup_cfg | TODO_update_ssa);

      /* ccp_finalize does not preserve loop-closed ssa.  */
      loops_state_clear (LOOP_CLOSED_SSA);
    }

  free_dominance_info (CDI_DOMINATORS);
  return todo;
}
const pass_data pass_data_ccp =
{
  GIMPLE_PASS, /* type */
  "ccp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CCP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

class pass_ccp : public gimple_opt_pass
{
public:
  pass_ccp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_ccp, ctxt), nonzero_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_ccp (m_ctxt); }
  void set_pass_param (unsigned int n, bool param)
    {
      gcc_assert (n == 0);
      nonzero_p = param;
    }
  virtual bool gate (function *) { return flag_tree_ccp != 0; }
  virtual unsigned int execute (function *) { return do_ssa_ccp (nonzero_p); }

private:
  /* Determines whether the pass instance records nonzero bits.  */
  bool nonzero_p;
}; // class pass_ccp

gimple_opt_pass *
make_pass_ccp (gcc::context *ctxt)
{
  return new pass_ccp (ctxt);
}
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple *stmt;

  basic_block bb = gsi_bb (i);
  gimple *call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
	  || !fndecl_built_in_p (callee, BUILT_IN_NORMAL)
	  /* All regular builtins are ok, just obviously not alloca.  */
	  || ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callee)))
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	goto second_stack_restore;
    }

  if (!gsi_end_p (i))
    return NULL_TREE;

  /* Allow one successor of the exit block, or zero successors.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }

 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple *stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
	{
	  callee = gimple_call_fndecl (stack_save);
	  if (callee && fndecl_built_in_p (callee, BUILT_IN_STACK_SAVE))
	    {
	      gimple_stmt_iterator stack_save_gsi;
	      tree rhs;

	      stack_save_gsi = gsi_for_stmt (stack_save);
	      rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
	      update_call_from_tree (&stack_save_gsi, rhs);
	    }
	}
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
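
/* Sketch of the situation handled above (names invented): after inlining two
   callees that each used a variable-length array, a block may contain

     __builtin_stack_restore (saved.1_4);
     __builtin_stack_restore (saved.2_6);

   with no call or asm in between; the first restore has no observable effect
   and is deleted, and its matching __builtin_stack_save is zapped when the
   saved pointer has a single use.  */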
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple *call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
				 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
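
/* Example of the transformations described above, on targets whose va_list
   is a plain character or void pointer (variable names hypothetical):

     __builtin_va_start (&ap, 0);    becomes   ap = __builtin_next_arg (0);
     __builtin_va_copy (&dst, src);  becomes   dst = src;
     __builtin_va_end (&ap);         is deleted as a no-op.  */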
/* Attempt to make the block of __builtin_unreachable I unreachable by changing
   the incoming jumps.  Return true if at least one jump was changed.  */

static bool
optimize_unreachable (gimple_stmt_iterator i)
{
  basic_block bb = gsi_bb (i);
  gimple_stmt_iterator gsi;
  gimple *stmt;
  edge_iterator ei;
  edge e;
  bool ret;

  if (flag_sanitize & SANITIZE_UNREACHABLE)
    return false;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  /* Verify we do not need to preserve the label.  */
	  if (FORCED_LABEL (gimple_label_label (label_stmt)))
	    return false;

	  continue;
	}

      /* Only handle the case that __builtin_unreachable is the first statement
	 in the block.  We rely on DCE to remove stmts without side-effects
	 before __builtin_unreachable.  */
      if (gsi_stmt (gsi) != gsi_stmt (i))
	return false;
    }

  ret = false;
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      if (gsi_end_p (gsi))
	continue;

      stmt = gsi_stmt (gsi);
      if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
	{
	  if (e->flags & EDGE_TRUE_VALUE)
	    gimple_cond_make_false (cond_stmt);
	  else if (e->flags & EDGE_FALSE_VALUE)
	    gimple_cond_make_true (cond_stmt);
	  else
	    gcc_unreachable ();
	  update_stmt (cond_stmt);
	}
      else
	{
	  /* Todo: handle other cases.  Note that unreachable switch case
	     statements have already been removed.  */
	  continue;
	}

      ret = true;
    }

  return ret;
}
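
/* Example (hypothetical GIMPLE): for a block

     <bb 7>:
     __builtin_unreachable ();

   reached only by the true edge of "if (x_1 > 3)", that condition is
   rewritten with gimple_cond_make_false, so bb 7 loses its incoming edge
   and a later cfg cleanup can delete it.  */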
/* Optimize
     mask_2 = 1 << cnt_1;
     _4 = __atomic_fetch_or_* (ptr_6, mask_2, _3);
     _5 = _4 & mask_2;
   to
     _4 = ATOMIC_BIT_TEST_AND_SET (ptr_6, cnt_1, 0, _3);
     _5 = _4;
   If _5 is only used in _5 != 0 or _5 == 0 comparisons, 1
   is passed instead of 0, and the builtin just returns a zero
   or 1 value instead of the actual bit.
   Similarly for __sync_fetch_and_or_* (without the ", _3" part
   in there), and/or if mask_2 is a power of 2 constant.
   Similarly for xor instead of or, use ATOMIC_BIT_TEST_AND_COMPLEMENT
   in that case.  And similarly for and instead of or, except that
   the second argument to the builtin needs to be one's complement
   of the mask instead of mask.  */

static void
optimize_atomic_bit_test_and (gimple_stmt_iterator *gsip,
			      enum internal_fn fn, bool has_model_arg,
			      bool after)
{
  gimple *call = gsi_stmt (*gsip);
  tree lhs = gimple_call_lhs (call);
  use_operand_p use_p;
  gimple *use_stmt;
  tree mask, bit;
  optab optab;

  if (!flag_inline_atomics
      || optimize_debug
      || !gimple_call_builtin_p (call, BUILT_IN_NORMAL)
      || !lhs
      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)
      || !single_imm_use (lhs, &use_p, &use_stmt)
      || !is_gimple_assign (use_stmt)
      || gimple_assign_rhs_code (use_stmt) != BIT_AND_EXPR
      || !gimple_vdef (call))
    return;

  switch (fn)
    {
    case IFN_ATOMIC_BIT_TEST_AND_SET:
      optab = atomic_bit_test_and_set_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
      optab = atomic_bit_test_and_complement_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_RESET:
      optab = atomic_bit_test_and_reset_optab;
      break;
    default:
      return;
    }

  if (optab_handler (optab, TYPE_MODE (TREE_TYPE (lhs))) == CODE_FOR_nothing)
    return;

  mask = gimple_call_arg (call, 1);
  tree use_lhs = gimple_assign_lhs (use_stmt);
  if (!use_lhs)
    return;

  if (TREE_CODE (mask) == INTEGER_CST)
    {
      if (fn == IFN_ATOMIC_BIT_TEST_AND_RESET)
	mask = const_unop (BIT_NOT_EXPR, TREE_TYPE (mask), mask);
      mask = fold_convert (TREE_TYPE (lhs), mask);
      int ibit = tree_log2 (mask);
      if (ibit < 0)
	return;
      bit = build_int_cst (TREE_TYPE (lhs), ibit);
    }
  else if (TREE_CODE (mask) == SSA_NAME)
    {
      gimple *g = SSA_NAME_DEF_STMT (mask);
      if (fn == IFN_ATOMIC_BIT_TEST_AND_RESET)
	{
	  if (!is_gimple_assign (g)
	      || gimple_assign_rhs_code (g) != BIT_NOT_EXPR)
	    return;
	  mask = gimple_assign_rhs1 (g);
	  if (TREE_CODE (mask) != SSA_NAME)
	    return;
	  g = SSA_NAME_DEF_STMT (mask);
	}
      if (!is_gimple_assign (g)
	  || gimple_assign_rhs_code (g) != LSHIFT_EXPR
	  || !integer_onep (gimple_assign_rhs1 (g)))
	return;
      bit = gimple_assign_rhs2 (g);
    }
  else
    return;

  if (gimple_assign_rhs1 (use_stmt) == lhs)
    {
      if (!operand_equal_p (gimple_assign_rhs2 (use_stmt), mask, 0))
	return;
    }
  else if (gimple_assign_rhs2 (use_stmt) != lhs
	   || !operand_equal_p (gimple_assign_rhs1 (use_stmt), mask, 0))
    return;

  bool use_bool = true;
  bool has_debug_uses = false;
  imm_use_iterator iter;
  gimple *g;

  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use_lhs))
    use_bool = false;
  FOR_EACH_IMM_USE_STMT (g, iter, use_lhs)
    {
      enum tree_code code = ERROR_MARK;
      tree op0 = NULL_TREE, op1 = NULL_TREE;
      if (is_gimple_debug (g))
	{
	  has_debug_uses = true;
	  continue;
	}
      else if (is_gimple_assign (g))
	switch (gimple_assign_rhs_code (g))
	  {
	  case COND_EXPR:
	    op1 = gimple_assign_rhs1 (g);
	    code = TREE_CODE (op1);
	    op0 = TREE_OPERAND (op1, 0);
	    op1 = TREE_OPERAND (op1, 1);
	    break;
	  case EQ_EXPR:
	  case NE_EXPR:
	    code = gimple_assign_rhs_code (g);
	    op0 = gimple_assign_rhs1 (g);
	    op1 = gimple_assign_rhs2 (g);
	    break;
	  default:
	    break;
	  }
      else if (gimple_code (g) == GIMPLE_COND)
	{
	  code = gimple_cond_code (g);
	  op0 = gimple_cond_lhs (g);
	  op1 = gimple_cond_rhs (g);
	}

      if ((code == EQ_EXPR || code == NE_EXPR)
	  && op0 == use_lhs
	  && integer_zerop (op1))
	{
	  use_operand_p use_p;
	  int n = 0;
	  FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	    n++;
	  if (n == 1)
	    continue;
	}

      use_bool = false;
      BREAK_FROM_IMM_USE_STMT (iter);
    }

  tree new_lhs = make_ssa_name (TREE_TYPE (lhs));
  tree flag = build_int_cst (TREE_TYPE (lhs), use_bool);
  if (has_model_arg)
    g = gimple_build_call_internal (fn, 4, gimple_call_arg (call, 0),
				    bit, flag, gimple_call_arg (call, 2));
  else
    g = gimple_build_call_internal (fn, 3, gimple_call_arg (call, 0),
				    bit, flag);
  gimple_call_set_lhs (g, new_lhs);
  gimple_set_location (g, gimple_location (call));
  gimple_set_vuse (g, gimple_vuse (call));
  gimple_set_vdef (g, gimple_vdef (call));
  bool throws = stmt_can_throw_internal (call);
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (call)));
  SSA_NAME_DEF_STMT (gimple_vdef (call)) = g;
  gimple_stmt_iterator gsi = *gsip;
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
  edge e = NULL;
  if (throws)
    {
      maybe_clean_or_replace_eh_stmt (call, g);
      if (after || (use_bool && has_debug_uses))
	e = find_fallthru_edge (gsi_bb (gsi)->succs);
    }
  if (after)
    {
      /* The internal function returns the value of the specified bit
	 before the atomic operation.  If we are interested in the value
	 of the specified bit after the atomic operation (only makes sense
	 for xor, otherwise the bit content is compile time known),
	 we need to invert the bit.  */
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (lhs)),
			       BIT_XOR_EXPR, new_lhs,
			       use_bool ? build_int_cst (TREE_TYPE (lhs), 1)
					: mask);
      new_lhs = gimple_assign_lhs (g);
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (&gsi, g, GSI_NEW_STMT);
    }
  if (use_bool && has_debug_uses)
    {
      tree temp = NULL_TREE;
      if (!throws || after || single_pred_p (e->dest))
	{
	  temp = make_node (DEBUG_EXPR_DECL);
	  DECL_ARTIFICIAL (temp) = 1;
	  TREE_TYPE (temp) = TREE_TYPE (lhs);
	  SET_DECL_MODE (temp, TYPE_MODE (TREE_TYPE (lhs)));
	  tree t = build2 (LSHIFT_EXPR, TREE_TYPE (lhs), new_lhs, bit);
	  g = gimple_build_debug_bind (temp, t, g);
	  if (throws && !after)
	    {
	      gsi = gsi_after_labels (e->dest);
	      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
	    }
	  else
	    gsi_insert_after (&gsi, g, GSI_NEW_STMT);
	}
      FOR_EACH_IMM_USE_STMT (g, iter, use_lhs)
	if (is_gimple_debug (g))
	  {
	    use_operand_p use_p;
	    if (temp == NULL_TREE)
	      gimple_debug_bind_reset_value (g);
	    else
	      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		SET_USE (use_p, temp);
	    update_stmt (g);
	  }
    }
  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_lhs)
    = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use_lhs);
  replace_uses_by (use_lhs, new_lhs);
  gsi = gsi_for_stmt (use_stmt);
  gsi_remove (&gsi, true);
  release_defs (use_stmt);
  gsi_remove (gsip, true);
  release_ssa_name (lhs);
}
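
/* Source-level sketch of what the transformation above enables (variable
   names invented):

     if (__atomic_fetch_or (&flags, 1 << bit, __ATOMIC_SEQ_CST) & (1 << bit))
       ...

   becomes a call to the .ATOMIC_BIT_TEST_AND_SET internal function, which a
   target with the corresponding optab can expand to a bit-test-and-set style
   instruction instead of materializing the whole old value.  */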
/* Optimize
     a = {};
     b = a;
   into
     a = {};
     b = {};
   Similarly for memset (&a, ..., sizeof (a)); instead of a = {};
   and/or memcpy (&b, &a, sizeof (a)); instead of b = a;  */

static void
optimize_memcpy (gimple_stmt_iterator *gsip, tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsip);
  if (gimple_has_volatile_ops (stmt))
    return;

  tree vuse = gimple_vuse (stmt);
  if (vuse == NULL)
    return;

  gimple *defstmt = SSA_NAME_DEF_STMT (vuse);
  tree src2 = NULL_TREE, len2 = NULL_TREE;
  poly_int64 offset, offset2;
  tree val = integer_zero_node;
  if (gimple_store_p (defstmt)
      && gimple_assign_single_p (defstmt)
      && TREE_CODE (gimple_assign_rhs1 (defstmt)) == CONSTRUCTOR
      && !gimple_clobber_p (defstmt))
    src2 = gimple_assign_lhs (defstmt);
  else if (gimple_call_builtin_p (defstmt, BUILT_IN_MEMSET)
	   && TREE_CODE (gimple_call_arg (defstmt, 0)) == ADDR_EXPR
	   && TREE_CODE (gimple_call_arg (defstmt, 1)) == INTEGER_CST)
    {
      src2 = TREE_OPERAND (gimple_call_arg (defstmt, 0), 0);
      len2 = gimple_call_arg (defstmt, 2);
      val = gimple_call_arg (defstmt, 1);
      /* For non-0 val, we'd have to transform stmt from assignment
	 into memset (only if dest is addressable).  */
      if (!integer_zerop (val) && is_gimple_assign (stmt))
	src2 = NULL_TREE;
    }

  if (src2 == NULL_TREE)
    return;

  if (len == NULL_TREE)
    len = (TREE_CODE (src) == COMPONENT_REF
	   ? DECL_SIZE_UNIT (TREE_OPERAND (src, 1))
	   : TYPE_SIZE_UNIT (TREE_TYPE (src)));
  if (len2 == NULL_TREE)
    len2 = (TREE_CODE (src2) == COMPONENT_REF
	    ? DECL_SIZE_UNIT (TREE_OPERAND (src2, 1))
	    : TYPE_SIZE_UNIT (TREE_TYPE (src2)));
  if (len == NULL_TREE
      || !poly_int_tree_p (len)
      || len2 == NULL_TREE
      || !poly_int_tree_p (len2))
    return;

  src = get_addr_base_and_unit_offset (src, &offset);
  src2 = get_addr_base_and_unit_offset (src2, &offset2);
  if (src == NULL_TREE
      || src2 == NULL_TREE
      || maybe_lt (offset, offset2))
    return;

  if (!operand_equal_p (src, src2, 0))
    return;

  /* [ src + offset2, src + offset2 + len2 - 1 ] is set to val.
     Make sure that
     [ src + offset, src + offset + len - 1 ] is a subset of that.  */
  if (maybe_gt (wi::to_poly_offset (len) + (offset - offset2),
		wi::to_poly_offset (len2)))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Simplified\n  ");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "after previous\n  ");
      print_gimple_stmt (dump_file, defstmt, 0, dump_flags);
    }

  /* For simplicity, don't change the kind of the stmt,
     turn dest = src; into dest = {}; and memcpy (&dest, &src, len);
     into memset (&dest, val, len);
     In theory we could change dest = src into memset if dest
     is addressable (maybe beneficial if val is not 0), or
     memcpy (&dest, &src, len) into dest = {} if len is the size
     of dest, dest isn't volatile.  */
  if (is_gimple_assign (stmt))
    {
      tree ctor = build_constructor (TREE_TYPE (dest), NULL);
      gimple_assign_set_rhs_from_tree (gsip, ctor);
      update_stmt (stmt);
    }
  else /* If stmt is memcpy, transform it into memset.  */
    {
      gcall *call = as_a <gcall *> (stmt);
      tree fndecl = builtin_decl_implicit (BUILT_IN_MEMSET);
      gimple_call_set_fndecl (call, fndecl);
      gimple_call_set_fntype (call, TREE_TYPE (fndecl));
      gimple_call_set_arg (call, 1, val);
      update_stmt (stmt);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "into\n  ");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }
}
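
/* Example (hypothetical): for "struct S a, b;" the sequence

     memset (&a, 0, sizeof (a));
     b = a;

   is rewritten above into

     memset (&a, 0, sizeof (a));
     b = {};

   because every byte the copy would read is known to hold the memset
   value.  */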
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

const pass_data pass_data_fold_builtins =
{
  GIMPLE_PASS, /* type */
  "fab", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_fold_builtins : public gimple_opt_pass
{
public:
  pass_fold_builtins (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fold_builtins, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fold_builtins (m_ctxt); }
  virtual unsigned int execute (function *);

}; // class pass_fold_builtins

unsigned int
pass_fold_builtins::execute (function *fun)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple *stmt, *old_stmt;
	  tree callee;
	  enum built_in_function fcode;

	  stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      /* Remove all *ssaname_N ={v} {CLOBBER}; stmts,
		 after the last GIMPLE DSE they aren't needed and might
		 unnecessarily keep the SSA_NAMEs live.  */
	      if (gimple_clobber_p (stmt))
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  if (TREE_CODE (lhs) == MEM_REF
		      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
		    {
		      unlink_stmt_vdef (stmt);
		      gsi_remove (&i, true);
		      release_defs (stmt);
		      continue;
		    }
		}
	      else if (gimple_assign_load_p (stmt) && gimple_store_p (stmt))
		optimize_memcpy (&i, gimple_assign_lhs (stmt),
				 gimple_assign_rhs1 (stmt), NULL_TREE);
	      gsi_next (&i);
	      continue;
	    }

	  callee = gimple_call_fndecl (stmt);
	  if (!callee || !fndecl_built_in_p (callee, BUILT_IN_NORMAL))
	    {
	      gsi_next (&i);
	      continue;
	    }

	  fcode = DECL_FUNCTION_CODE (callee);
	  if (fold_stmt (&i))
	    ;
	  else
	    {
	      tree result = NULL_TREE;
	      switch (DECL_FUNCTION_CODE (callee))
		{
		case BUILT_IN_CONSTANT_P:
		  /* Resolve __builtin_constant_p.  If it hasn't been
		     folded to integer_one_node by now, it's fairly
		     certain that the value simply isn't constant.  */
		  result = integer_zero_node;
		  break;

		case BUILT_IN_ASSUME_ALIGNED:
		  /* Remove __builtin_assume_aligned.  */
		  result = gimple_call_arg (stmt, 0);
		  break;

		case BUILT_IN_STACK_RESTORE:
		  result = optimize_stack_restore (i);
		  if (result)
		    break;
		  gsi_next (&i);
		  continue;

		case BUILT_IN_UNREACHABLE:
		  if (optimize_unreachable (i))
		    cfg_changed = true;
		  break;

		case BUILT_IN_ATOMIC_FETCH_OR_1:
		case BUILT_IN_ATOMIC_FETCH_OR_2:
		case BUILT_IN_ATOMIC_FETCH_OR_4:
		case BUILT_IN_ATOMIC_FETCH_OR_8:
		case BUILT_IN_ATOMIC_FETCH_OR_16:
		  optimize_atomic_bit_test_and (&i,
						IFN_ATOMIC_BIT_TEST_AND_SET,
						true, false);
		  break;
		case BUILT_IN_SYNC_FETCH_AND_OR_1:
		case BUILT_IN_SYNC_FETCH_AND_OR_2:
		case BUILT_IN_SYNC_FETCH_AND_OR_4:
		case BUILT_IN_SYNC_FETCH_AND_OR_8:
		case BUILT_IN_SYNC_FETCH_AND_OR_16:
		  optimize_atomic_bit_test_and (&i,
						IFN_ATOMIC_BIT_TEST_AND_SET,
						false, false);
		  break;

		case BUILT_IN_ATOMIC_FETCH_XOR_1:
		case BUILT_IN_ATOMIC_FETCH_XOR_2:
		case BUILT_IN_ATOMIC_FETCH_XOR_4:
		case BUILT_IN_ATOMIC_FETCH_XOR_8:
		case BUILT_IN_ATOMIC_FETCH_XOR_16:
		  optimize_atomic_bit_test_and
			(&i, IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT, true, false);
		  break;
		case BUILT_IN_SYNC_FETCH_AND_XOR_1:
		case BUILT_IN_SYNC_FETCH_AND_XOR_2:
		case BUILT_IN_SYNC_FETCH_AND_XOR_4:
		case BUILT_IN_SYNC_FETCH_AND_XOR_8:
		case BUILT_IN_SYNC_FETCH_AND_XOR_16:
		  optimize_atomic_bit_test_and
			(&i, IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT, false, false);
		  break;

		case BUILT_IN_ATOMIC_XOR_FETCH_1:
		case BUILT_IN_ATOMIC_XOR_FETCH_2:
		case BUILT_IN_ATOMIC_XOR_FETCH_4:
		case BUILT_IN_ATOMIC_XOR_FETCH_8:
		case BUILT_IN_ATOMIC_XOR_FETCH_16:
		  optimize_atomic_bit_test_and
			(&i, IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT, true, true);
		  break;
		case BUILT_IN_SYNC_XOR_AND_FETCH_1:
		case BUILT_IN_SYNC_XOR_AND_FETCH_2:
		case BUILT_IN_SYNC_XOR_AND_FETCH_4:
		case BUILT_IN_SYNC_XOR_AND_FETCH_8:
		case BUILT_IN_SYNC_XOR_AND_FETCH_16:
		  optimize_atomic_bit_test_and
			(&i, IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT, false, true);
		  break;

		case BUILT_IN_ATOMIC_FETCH_AND_1:
		case BUILT_IN_ATOMIC_FETCH_AND_2:
		case BUILT_IN_ATOMIC_FETCH_AND_4:
		case BUILT_IN_ATOMIC_FETCH_AND_8:
		case BUILT_IN_ATOMIC_FETCH_AND_16:
		  optimize_atomic_bit_test_and (&i,
						IFN_ATOMIC_BIT_TEST_AND_RESET,
						true, false);
		  break;
		case BUILT_IN_SYNC_FETCH_AND_AND_1:
		case BUILT_IN_SYNC_FETCH_AND_AND_2:
		case BUILT_IN_SYNC_FETCH_AND_AND_4:
		case BUILT_IN_SYNC_FETCH_AND_AND_8:
		case BUILT_IN_SYNC_FETCH_AND_AND_16:
		  optimize_atomic_bit_test_and (&i,
						IFN_ATOMIC_BIT_TEST_AND_RESET,
						false, false);
		  break;

		case BUILT_IN_MEMCPY:
		  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
		      && TREE_CODE (gimple_call_arg (stmt, 0)) == ADDR_EXPR
		      && TREE_CODE (gimple_call_arg (stmt, 1)) == ADDR_EXPR
		      && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST)
		    {
		      tree dest = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
		      tree src = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
		      tree len = gimple_call_arg (stmt, 2);
		      optimize_memcpy (&i, dest, src, len);
		    }
		  break;

		case BUILT_IN_VA_START:
		case BUILT_IN_VA_END:
		case BUILT_IN_VA_COPY:
		  /* These shouldn't be folded before pass_stdarg.  */
		  result = optimize_stdarg_builtin (stmt);
		  break;

		default:;
		}

	      if (result == NULL_TREE)
		{
		  gsi_next (&i);
		  continue;
		}

	      if (!update_call_from_tree (&i, result))
		gimplify_and_update_call_from_tree (&i, result);
	    }

	  todoflags |= TODO_update_address_taken;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  old_stmt = stmt;
	  stmt = gsi_stmt (i);
	  update_stmt (stmt);

	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin, there might be new opportunities now.  */
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || !fndecl_built_in_p (callee, fcode))
	    gsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}

gimple_opt_pass *
make_pass_fold_builtins (gcc::context *ctxt)
{
  return new pass_fold_builtins (ctxt);
}
/* A simple pass that emits some warnings post IPA.  */

const pass_data pass_data_post_ipa_warn =
{
  GIMPLE_PASS, /* type */
  "post_ipa_warn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_post_ipa_warn : public gimple_opt_pass
{
public:
  pass_post_ipa_warn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_post_ipa_warn, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_post_ipa_warn (m_ctxt); }
  virtual bool gate (function *) { return warn_nonnull != 0; }
  virtual unsigned int execute (function *);

}; // class pass_post_ipa_warn

unsigned int
pass_post_ipa_warn::execute (function *fun)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  if (!is_gimple_call (stmt) || gimple_no_warning_p (stmt))
	    continue;

	  bitmap nonnullargs
	    = get_nonnull_args (gimple_call_fntype (stmt));
	  if (!nonnullargs)
	    continue;

	  for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      if (TREE_CODE (TREE_TYPE (arg)) != POINTER_TYPE)
		continue;
	      if (!integer_zerop (arg))
		continue;
	      if (!bitmap_empty_p (nonnullargs)
		  && !bitmap_bit_p (nonnullargs, i))
		continue;

	      location_t loc = gimple_location (stmt);
	      auto_diagnostic_group d;
	      if (warning_at (loc, OPT_Wnonnull,
			      "%Gargument %u null where non-null "
			      "expected", stmt, i + 1))
		{
		  tree fndecl = gimple_call_fndecl (stmt);
		  if (fndecl && DECL_IS_BUILTIN (fndecl))
		    inform (loc, "in a call to built-in function %qD",
			    fndecl);
		  else if (fndecl)
		    inform (DECL_SOURCE_LOCATION (fndecl),
			    "in a call to function %qD declared here",
			    fndecl);
		}
	    }
	  BITMAP_FREE (nonnullargs);
	}
    }
  return 0;
}

gimple_opt_pass *
make_pass_post_ipa_warn (gcc::context *ctxt)
{
  return new pass_post_ipa_warn (ctxt);
}
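
/* Example of the diagnostic (hypothetical source): after IPA, a call such as

     memcpy (dst, NULL, n);

   whose second parameter is declared nonnull produces
   "argument 2 null where non-null expected", followed by a note pointing
   either at the built-in or at the user declaration of the callee.  */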