/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000-2014 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   This algorithm uses wide-ints at the max precision of the target.
   This means that, with one uninteresting exception, variables with
   UNSIGNED types never go to VARYING because the bits above the
   precision of the type of the variable are always zero.  The
   uninteresting case is a variable of UNSIGNED type that has the
   maximum precision of the target.  Such variables can go to VARYING,
   but this causes no loss of information since these variables will
   never be extended.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
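
/* As an end-to-end illustration (a sketch, not taken from the
   references above): given the GIMPLE sequence

	x_1 = 10;
	y_2 = x_1 + 5;
	if (y_2 > 20) ...

   CCP sets x_1 to CONSTANT 10, folds y_2 to CONSTANT 15, evaluates
   the predicate 15 > 20 to false, and marks the true edge as not
   executable, so the guarded block is never even simulated.  */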
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stor-layout.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "gimple-pretty-print.h"
#include "hash-table.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "diagnostic-core.h"
#include "dbgcnt.h"
#include "params.h"
#include "wide-int-print.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

struct prop_value_d {
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For X
       with a CONSTANT lattice value X & ~mask == value & ~mask.  The
       zero bits in the mask cover constant values.  The ones mean no
       information.  */
    widest_int mask;
};

typedef struct prop_value_d prop_value_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;
static unsigned n_const_val;

static void canonicalize_value (prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      if (TREE_CODE (val.value) != INTEGER_CST
	  || val.mask == 0)
	{
	  fprintf (outf, "%sCONSTANT ", prefix);
	  print_generic_expr (outf, val.value, dump_flags);
	}
      else
	{
	  widest_int cval = wi::bit_and_not (wi::to_widest (val.value),
					     val.mask);
	  fprintf (outf, "%sCONSTANT ", prefix);
	  print_hex (cval, outf);
	  fprintf (outf, " (");
	  print_hex (val.mask, outf);
	  fprintf (outf, ")");
	}
      break;
    default:
      gcc_unreachable ();
    }
}
/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}
/* Extend NONZERO_BITS to a full mask, with the upper bits being set.  */

static widest_int
extend_mask (const wide_int &nonzero_bits)
{
  return (wi::mask <widest_int> (wi::get_precision (nonzero_bits), true)
	  | widest_int::from (nonzero_bits, UNSIGNED));
}
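
/* For example (illustrative only): if NONZERO_BITS is the 8-bit value
   0x0f, the returned widest_int mask is ...ffffff0f -- the low four
   bits may be nonzero (mask bits set, meaning unknown), the next four
   are known zero, and everything above the original precision is
   conservatively treated as unknown.  */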
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (!virtual_operand_p (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
	val.lattice_val = UNDEFINED;
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = -1;
	  if (flag_tree_bit_ccp)
	    {
	      wide_int nonzero_bits = get_nonzero_bits (var);
	      if (nonzero_bits != -1)
		{
		  val.lattice_val = CONSTANT;
		  val.value = build_zero_cst (TREE_TYPE (var));
		  val.mask = extend_mask (nonzero_bits);
		}
	    }
	}
    }
  else if (is_gimple_assign (stmt))
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	{
	  /* Any other variable defined by an assignment is considered
	     UNDEFINED.  */
	  val.lattice_val = UNDEFINED;
	}
    }
  else if ((is_gimple_call (stmt)
	    && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      /* A variable defined by a call or a PHI node is considered
	 UNDEFINED.  */
      val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = -1;
    }

  return val;
}
/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL
      || SSA_NAME_VERSION (var) >= n_const_val)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_value (val);

  return val;
}
/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  prop_value_t *val;
  if (TREE_CODE (var) != SSA_NAME)
    {
      if (is_gimple_min_invariant (var))
	return var;
      return NULL_TREE;
    }
  val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
	  || val->mask == 0))
    return val->value;
  return NULL_TREE;
}
/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = -1;
}
/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
     This is to fix the following problem (see PR 29921): Suppose we have

     x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.

   For other constants, make sure to drop TREE_OVERFLOW.  */

static void
canonicalize_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT)
    return;

  if (TREE_OVERFLOW_P (val->value))
    val->value = drop_tree_overflow (val->value);

  if (TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
    }
}
/* Return whether the lattice transition is valid.  */

static bool
valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
{
  /* Lattice transitions must always be monotonically increasing in
     value.  */
  if (old_val.lattice_val < new_val.lattice_val)
    return true;

  if (old_val.lattice_val != new_val.lattice_val)
    return false;

  if (!old_val.value && !new_val.value)
    return true;

  /* Now both lattice values are CONSTANT.  */

  /* Allow transitioning from PHI <&x, not executable> == &x
     to PHI <&x, &y> == common alignment.  */
  if (TREE_CODE (old_val.value) != INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return true;

  /* Bit-lattices have to agree in the still valid bits.  */
  if (TREE_CODE (old_val.value) == INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return (wi::bit_and_not (wi::to_widest (old_val.value), new_val.mask)
	    == wi::bit_and_not (wi::to_widest (new_val.value), new_val.mask));

  /* Otherwise constant values have to agree.  */
  return operand_equal_p (old_val.value, new_val.value, 0);
}
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_value (&new_val);

  /* We have to be careful to not go up the bitwise lattice
     represented by the mask.
     ??? This doesn't seem to be the best place to enforce this.  */
  if (new_val.lattice_val == CONSTANT
      && old_val->lattice_val == CONSTANT
      && TREE_CODE (new_val.value) == INTEGER_CST
      && TREE_CODE (old_val->value) == INTEGER_CST)
    {
      widest_int diff = (wi::to_widest (new_val.value)
			 ^ wi::to_widest (old_val->value));
      new_val.mask = new_val.mask | old_val->mask | diff;
    }
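
  /* A small worked example (illustrative, not from the original
     sources): if the old value was 3 (binary 11, mask 0) and the new
     value is 1 (binary 01, mask 0), then diff is 10, so the merged
     mask becomes 10 -- only the low bit remains known.  */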
  gcc_assert (valid_lattice_transition (*old_val, new_val));

  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
     caller that this was a non-transition.  */
  if (old_val->lattice_val != new_val.lattice_val
      || (new_val.lattice_val == CONSTANT
	  && TREE_CODE (new_val.value) == INTEGER_CST
	  && (TREE_CODE (old_val->value) != INTEGER_CST
	      || new_val.mask != old_val->mask)))
    {
      /* ??? We would like to delay creation of INTEGER_CSTs from
	 partially constants here.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}
static prop_value_t get_value_for_expr (tree, bool);
static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, widest_int *, widest_int *,
			       tree, const widest_int &, const widest_int &,
			       tree, const widest_int &, const widest_int &);

/* Return a widest_int that can be used for bitwise simplifications
   from VAL.  */

static widest_int
value_to_wide_int (prop_value_t val)
{
  if (val.value
      && TREE_CODE (val.value) == INTEGER_CST)
    return wi::to_widest (val.value);

  return 0;
}
/* Return the value for the address expression EXPR based on alignment
   information.  */

static prop_value_t
get_value_from_alignment (tree expr)
{
  tree type = TREE_TYPE (expr);
  prop_value_t val;
  unsigned HOST_WIDE_INT bitpos;
  unsigned int align;

  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);

  get_pointer_alignment_1 (expr, &align, &bitpos);
  val.mask = (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
	      ? wi::mask <widest_int> (TYPE_PRECISION (type), false)
	      : -1).and_not (align / BITS_PER_UNIT - 1);
  val.lattice_val = val.mask == -1 ? VARYING : CONSTANT;
  if (val.lattice_val == CONSTANT)
    val.value = build_int_cstu (type, bitpos / BITS_PER_UNIT);
  else
    val.value = NULL_TREE;

  return val;
}
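
/* For instance (an illustrative sketch): for EXPR = &a where 'a' is
   known to be 16-byte aligned, get_pointer_alignment_1 reports
   align = 128 bits, so the low four bits of the unknown-mask are
   cleared and the lattice value becomes CONSTANT with value 0 -- the
   pointer's low four bits are known to be zero.  */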
/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
   return constant bits extracted from alignment information for
   invariant addresses.  */

static prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
  prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      val = *get_value (expr);
      if (for_bits_p
	  && val.lattice_val == CONSTANT
	  && TREE_CODE (val.value) == ADDR_EXPR)
	val = get_value_from_alignment (val.value);
    }
  else if (is_gimple_min_invariant (expr)
	   && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      val.mask = 0;
      canonicalize_value (&val);
    }
  else if (TREE_CODE (expr) == ADDR_EXPR)
    val = get_value_from_alignment (expr);
  else
    {
      val.lattice_val = VARYING;
      val.mask = -1;
      val.value = NULL_TREE;
    }
  return val;
}
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT cause its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  if (has_undefined_operand
      && code == GIMPLE_CALL
      && gimple_call_internal_p (stmt))
    switch (gimple_call_internal_fn (stmt))
      {
	/* These 3 builtins use the first argument just as a magic
	   way how to find out a decl uid.  */
      case IFN_GOMP_SIMD_LANE:
      case IFN_GOMP_SIMD_VF:
      case IFN_GOMP_SIMD_LAST_LANE:
	has_undefined_operand = false;
	break;
      default:
	break;
      }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	case ADDR_EXPR:
	  /* If any part of an address is UNDEFINED, like the index
	     of an ARRAY_EXPR, then treat the result as UNDEFINED.  */
	  return UNDEFINED;

	default:
	  break;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to CONSTANT.  During iteration UNDEFINED may still drop
     to CONSTANT.  */
  if (has_undefined_operand)
    return CONSTANT;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}
/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call or a call to function with
     assume_aligned/alloc_align attribute, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl, fntype = gimple_call_fntype (stmt);
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)
	      && !lookup_attribute ("assume_aligned",
				    TYPE_ATTRIBUTES (fntype))
	      && !lookup_attribute ("alloc_align",
				    TYPE_ATTRIBUTES (fntype))))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  n_const_val = num_ssa_names;
  const_val = XCNEWVEC (prop_value_t, n_const_val);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying;

	  /* If the statement is a control insn, then we do not
	     want to avoid simulating the statement once.  Failure
	     to do so means that those edges will never get added.  */
	  if (stmt_ends_bb_p (stmt))
	    is_varying = false;
	  else
	    is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (virtual_operand_p (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}

/* Debug count support.  Reset the values of ssa names
   VARYING when the total number ssa names analyzed is
   beyond the debug count specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].mask = -1;
	  const_val[i].value = NULL_TREE;
	}
    }
}
/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;
  unsigned i;

  do_dbg_cnt ();

  /* Derive alignment and misalignment information from partially
     constant pointers in the lattice or nonzero bits from partially
     constant integers.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      prop_value_t *val;
      unsigned int tem, align;

      if (!name
	  || (!POINTER_TYPE_P (TREE_TYPE (name))
	      && (!INTEGRAL_TYPE_P (TREE_TYPE (name))
		  /* Don't record nonzero bits before IPA to avoid
		     using too much memory.  */
		  || first_pass_instance)))
	continue;

      val = get_value (name);
      if (val->lattice_val != CONSTANT
	  || TREE_CODE (val->value) != INTEGER_CST)
	continue;

      if (POINTER_TYPE_P (TREE_TYPE (name)))
	{
	  /* Trailing mask bits specify the alignment, trailing value
	     bits the misalignment.  */
	  tem = val->mask.to_uhwi ();
	  align = (tem & -tem);
	  if (align > 1)
	    set_ptr_info_alignment (get_ptr_info (name), align,
				    (TREE_INT_CST_LOW (val->value)
				     & (align - 1)));
	}
      else
	{
	  unsigned int precision = TYPE_PRECISION (TREE_TYPE (val->value));
	  wide_int nonzero_bits = wide_int::from (val->mask, precision,
						  UNSIGNED) | val->value;
	  nonzero_bits &= get_nonzero_bits (name);
	  set_nonzero_bits (name, nonzero_bits);
	}
    }

  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (get_constant_value,
					   ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;
  return something_changed;
}
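
/* Concretely (an illustrative sketch): a pointer whose lattice mask
   has its lowest set bit at bit 4 with value bits ending in 0100 is
   known to satisfy p % 16 == 4, so the pass records alignment 16 with
   misalignment 4 via set_ptr_info_alignment.  */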
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = -1;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && TREE_CODE (val1->value) == INTEGER_CST
	   && TREE_CODE (val2->value) == INTEGER_CST)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
	 drop to varying.  */
      val1->mask = (val1->mask | val2->mask
		    | (wi::to_widest (val1->value)
		       ^ wi::to_widest (val2->value)));
      if (val1->mask == -1)
	{
	  val1->lattice_val = VARYING;
	  val1->value = NULL_TREE;
	}
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 VAL1 already contains the value we want for equivalent values.  */
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && (TREE_CODE (val1->value) == ADDR_EXPR
	       || TREE_CODE (val2->value) == ADDR_EXPR))
    {
      /* When not equal addresses are involved try meeting for
	 alignment.  */
      prop_value_t tem = *val2;
      if (TREE_CODE (val1->value) == ADDR_EXPR)
	*val1 = get_value_for_expr (val1->value, true);
      if (TREE_CODE (val2->value) == ADDR_EXPR)
	tem = get_value_for_expr (val2->value, true);
      ccp_lattice_meet (val1, &tem);
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = -1;
      val1->value = NULL_TREE;
    }
}
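
/* Meet example (illustrative): meeting CONSTANT 12 (binary 1100,
   mask 0) with CONSTANT 4 (binary 0100, mask 0) ORs the XOR of the
   two values into the mask, giving mask 1000: bit 3 becomes unknown
   while the low bits stay known as 100.  Only if every bit ends up
   unknown (mask == -1) does the result drop to VARYING.  */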
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      break;

    case UNDEFINED:
      break;

    default:
      gcc_unreachable ();
    }

  new_val.lattice_val = UNDEFINED;
  new_val.value = NULL_TREE;
  new_val.mask = 0;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
		   "\n    Argument #%d (%d -> %d %sexecutable)\n",
		   i, e->src->index, e->dest->index,
		   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val = get_value_for_expr (arg, false);

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* Return the constant value for OP or OP otherwise.  */

static tree
valueize_op (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      tree tem = get_constant_value (op);
      if (tem)
	return tem;
    }
  return op;
}

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = valueize_op (gimple_cond_lhs (stmt));
	tree op1 = valueize_op (gimple_cond_rhs (stmt));
	enum tree_code code = gimple_cond_code (stmt);
	return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	/* Return the constant switch index.  */
	return valueize_op (gimple_switch_index (stmt));
      }

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
      return gimple_fold_stmt_to_constant_1 (stmt, valueize_op);

    default:
      gcc_unreachable ();
    }
}
/* Apply the operation CODE in type TYPE to the value, mask pair
   RVAL and RMASK representing a value of type RTYPE and set
   the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_unop_1 (enum tree_code code, tree type,
		  widest_int *val, widest_int *mask,
		  tree rtype, const widest_int &rval, const widest_int &rmask)
{
  switch (code)
    {
    case BIT_NOT_EXPR:
      *mask = rmask;
      *val = ~rval;
      break;

    case NEGATE_EXPR:
      {
	widest_int temv, temm;
	/* Return ~rval + 1.  */
	bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   type, temv, temm, type, 1, 0);
	break;
      }

    CASE_CONVERT:
      {
	signop sgn;

	/* First extend mask and value according to the original type.  */
	sgn = TYPE_SIGN (rtype);
	*mask = wi::ext (rmask, TYPE_PRECISION (rtype), sgn);
	*val = wi::ext (rval, TYPE_PRECISION (rtype), sgn);

	/* Then extend mask and value according to the target type.  */
	sgn = TYPE_SIGN (type);
	*mask = wi::ext (*mask, TYPE_PRECISION (type), sgn);
	*val = wi::ext (*val, TYPE_PRECISION (type), sgn);
	break;
      }

    default:
      *mask = -1;
      break;
    }
}
/* Apply the operation CODE in type TYPE to the value, mask pairs
   R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
   and R2TYPE and set the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_binop_1 (enum tree_code code, tree type,
		   widest_int *val, widest_int *mask,
		   tree r1type, const widest_int &r1val,
		   const widest_int &r1mask, tree r2type,
		   const widest_int &r2val, const widest_int &r2mask)
{
  signop sgn = TYPE_SIGN (type);
  int width = TYPE_PRECISION (type);
  bool swap_p = false;

  /* Assume we'll get a constant result.  Use an initial non varying
     value, we fall back to varying in the end if necessary.  */
  *mask = -1;

  switch (code)
    {
    case BIT_AND_EXPR:
      /* The mask is constant where there is a known not
	 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2)) */
      *mask = (r1mask | r2mask) & (r1val | r1mask) & (r2val | r2mask);
      *val = r1val & r2val;
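      /* Worked example (illustrative): r1 = value 1100, mask 0011
	 (low two bits unknown) and r2 = value 1010, mask 0 (fully
	 known).  Then *mask = 0011 & 1111 & 1010 = 0010, i.e. only
	 bit 1 of the AND is unknown, and the known value bits are
	 1100 & 1010 = 1000.  */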
      break;

    case BIT_IOR_EXPR:
      /* The mask is constant where there is a known
	 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
      *mask = (r1mask | r2mask)
	      .and_not (r1val.and_not (r1mask) | r2val.and_not (r2mask));
      *val = r1val | r2val;
      break;

    case BIT_XOR_EXPR:
      /* m1 | m2  */
      *mask = r1mask | r2mask;
      *val = r1val ^ r2val;
      break;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (r2mask == 0)
	{
	  widest_int shift = r2val;
	  if (shift == 0)
	    {
	      *mask = r1mask;
	      *val = r1val;
	    }
	  else
	    {
	      if (wi::neg_p (shift))
		{
		  shift = -shift;
		  if (code == RROTATE_EXPR)
		    code = LROTATE_EXPR;
		  else
		    code = RROTATE_EXPR;
		}
	      if (code == RROTATE_EXPR)
		{
		  *mask = wi::rrotate (r1mask, shift, width);
		  *val = wi::rrotate (r1val, shift, width);
		}
	      else
		{
		  *mask = wi::lrotate (r1mask, shift, width);
		  *val = wi::lrotate (r1val, shift, width);
		}
	    }
	}
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* ??? We can handle partially known shift counts if we know
	 their sign.  That way we can tell that (x << (y | 8)) & 255
	 is zero.  */
      if (r2mask == 0)
	{
	  widest_int shift = r2val;
	  if (shift == 0)
	    {
	      *mask = r1mask;
	      *val = r1val;
	    }
	  else
	    {
	      if (wi::neg_p (shift))
		{
		  shift = -shift;
		  if (code == RSHIFT_EXPR)
		    code = LSHIFT_EXPR;
		  else
		    code = RSHIFT_EXPR;
		}
	      if (code == RSHIFT_EXPR)
		{
		  *mask = wi::rshift (wi::ext (r1mask, width, sgn), shift, sgn);
		  *val = wi::rshift (wi::ext (r1val, width, sgn), shift, sgn);
		}
	      else
		{
		  *mask = wi::ext (wi::lshift (r1mask, shift), width, sgn);
		  *val = wi::ext (wi::lshift (r1val, shift), width, sgn);
		}
	    }
	}
      break;
    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      {
	/* Do the addition with unknown bits set to zero, to give carry-ins of
	   zero wherever possible.  */
	widest_int lo = r1val.and_not (r1mask) + r2val.and_not (r2mask);
	lo = wi::ext (lo, width, sgn);
	/* Do the addition with unknown bits set to one, to give carry-ins of
	   one wherever possible.  */
	widest_int hi = (r1val | r1mask) + (r2val | r2mask);
	hi = wi::ext (hi, width, sgn);
	/* Each bit in the result is known if (a) the corresponding bits in
	   both inputs are known, and (b) the carry-in to that bit position
	   is known.  We can check condition (b) by seeing if we got the same
	   result with minimised carries as with maximised carries.  */
	*mask = r1mask | r2mask | (lo ^ hi);
	*mask = wi::ext (*mask, width, sgn);
	/* It shouldn't matter whether we choose lo or hi here.  */
	*val = lo;
	break;
      }
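
      /* Carry example (illustrative): adding r1 = xx01 (high two bits
	 unknown) and r2 = 0001 gives lo = 0001 + 0001 = 0010 and
	 hi = 1101 + 0001 = 1110, so lo ^ hi = 1100 and the final mask
	 is 1100 | (lo ^ hi) = 1100: the low two bits of the sum are
	 known to be 10 regardless of the unknown operand bits.  */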
    case MINUS_EXPR:
      {
	widest_int temv, temm;
	bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
			  r2type, r2val, r2mask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   r1type, r1val, r1mask,
			   r2type, temv, temm);
	break;
      }
    case MULT_EXPR:
      {
	/* Just track trailing zeros in both operands and transfer
	   them to the other.  */
	int r1tz = wi::ctz (r1val | r1mask);
	int r2tz = wi::ctz (r2val | r2mask);
	if (r1tz + r2tz >= width)
	  {
	    *mask = 0;
	    *val = 0;
	  }
	else if (r1tz + r2tz > 0)
	  {
	    *mask = wi::ext (wi::mask <widest_int> (r1tz + r2tz, true),
			     width, sgn);
	    *val = 0;
	  }
	break;
      }
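
      /* Trailing-zero example (illustrative): if r1 is a multiple of
	 4 (at least two trailing zero bits) and r2 is a multiple of 2
	 (at least one), the product is a multiple of 8, so the low
	 three bits are known zero and only the bits above them stay
	 unknown.  */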
    case EQ_EXPR:
    case NE_EXPR:
      {
	widest_int m = r1mask | r2mask;
	if (r1val.and_not (m) != r2val.and_not (m))
	  {
	    *mask = 0;
	    *val = ((code == EQ_EXPR) ? 0 : 1);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = 1;
	    *val = 0;
	  }
	break;
      }

    case GE_EXPR:
    case GT_EXPR:
      swap_p = true;
      code = swap_tree_comparison (code);
      /* Fall through.  */
    case LT_EXPR:
    case LE_EXPR:
      {
	int minmax, maxmin;

	const widest_int &o1val = swap_p ? r2val : r1val;
	const widest_int &o1mask = swap_p ? r2mask : r1mask;
	const widest_int &o2val = swap_p ? r1val : r2val;
	const widest_int &o2mask = swap_p ? r1mask : r2mask;

	/* If the most significant bits are not known we know nothing.  */
	if (wi::neg_p (o1mask) || wi::neg_p (o2mask))
	  break;

	/* For comparisons the signedness is in the comparison operands.  */
	sgn = TYPE_SIGN (r1type);

	/* If we know the most significant bits we know the value
	   ranges by means of treating varying bits as zero
	   or one.  Do a cross comparison of the max/min pairs.  */
	maxmin = wi::cmp (o1val | o1mask, o2val.and_not (o2mask), sgn);
	minmax = wi::cmp (o1val.and_not (o1mask), o2val | o2mask, sgn);
	if (maxmin < 0)  /* o1 is less than o2.  */
	  {
	    *mask = 0;
	    *val = 1;
	  }
	else if (minmax > 0)  /* o1 is not less or equal to o2.  */
	  {
	    *mask = 0;
	    *val = 0;
	  }
	else if (maxmin == minmax)  /* o1 and o2 are equal.  */
	  {
	    /* This probably should never happen as we'd have
	       folded the thing during fully constant value folding.  */
	    *mask = 0;
	    *val = (code == LE_EXPR ? 1 : 0);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = 1;
	    *val = 0;
	  }
	break;
      }

    default:;
    }
}
/* Return the propagation value when applying the operation CODE to
   the value RHS yielding type TYPE.  */

static prop_value_t
bit_value_unop (enum tree_code code, tree type, tree rhs)
{
  prop_value_t rval = get_value_for_expr (rhs, true);
  widest_int value, mask;
  prop_value_t val;

  if (rval.lattice_val == UNDEFINED)
    return rval;

  gcc_assert ((rval.lattice_val == CONSTANT
	       && TREE_CODE (rval.value) == INTEGER_CST)
	      || rval.mask == -1);
  bit_value_unop_1 (code, type, &value, &mask,
		    TREE_TYPE (rhs), value_to_wide_int (rval), rval.mask);
  if (mask != -1)
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = wide_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
    }
  return val;
}
/* Return the propagation value when applying the operation CODE to
   the values RHS1 and RHS2 yielding type TYPE.  */

static prop_value_t
bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
{
  prop_value_t r1val = get_value_for_expr (rhs1, true);
  prop_value_t r2val = get_value_for_expr (rhs2, true);
  widest_int value, mask;
  prop_value_t val;

  if (r1val.lattice_val == UNDEFINED
      || r2val.lattice_val == UNDEFINED)
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
      return val;
    }

  gcc_assert ((r1val.lattice_val == CONSTANT
	       && TREE_CODE (r1val.value) == INTEGER_CST)
	      || r1val.mask == -1);
  gcc_assert ((r2val.lattice_val == CONSTANT
	       && TREE_CODE (r2val.value) == INTEGER_CST)
	      || r2val.mask == -1);
  bit_value_binop_1 (code, type, &value, &mask,
		     TREE_TYPE (rhs1), value_to_wide_int (r1val), r1val.mask,
		     TREE_TYPE (rhs2), value_to_wide_int (r2val), r2val.mask);
  if (mask != -1)
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = wide_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
    }
  return val;
}
/* Return the propagation value for __builtin_assume_aligned
   and functions with assume_aligned or alloc_aligned attribute.
   For __builtin_assume_aligned, ATTR is NULL_TREE,
   for assume_aligned attribute ATTR is non-NULL and ALLOC_ALIGNED
   is false, for alloc_aligned attribute ATTR is non-NULL and
   ALLOC_ALIGNED is true.  */

static prop_value_t
bit_value_assume_aligned (gimple stmt, tree attr, prop_value_t ptrval,
			  bool alloc_aligned)
{
  tree align, misalign = NULL_TREE, type;
  unsigned HOST_WIDE_INT aligni, misaligni = 0;
  prop_value_t alignval;
  widest_int value, mask;
  prop_value_t val;

  if (attr == NULL_TREE)
    {
      tree ptr = gimple_call_arg (stmt, 0);
      type = TREE_TYPE (ptr);
      ptrval = get_value_for_expr (ptr, true);
    }
  else
    {
      tree lhs = gimple_call_lhs (stmt);
      type = TREE_TYPE (lhs);
    }

  if (ptrval.lattice_val == UNDEFINED)
    return ptrval;
  gcc_assert ((ptrval.lattice_val == CONSTANT
	       && TREE_CODE (ptrval.value) == INTEGER_CST)
	      || ptrval.mask == -1);
  if (attr == NULL_TREE)
    {
      /* Get aligni and misaligni from __builtin_assume_aligned.  */
      align = gimple_call_arg (stmt, 1);
      if (!tree_fits_uhwi_p (align))
	return ptrval;
      aligni = tree_to_uhwi (align);
      if (gimple_call_num_args (stmt) > 2)
	{
	  misalign = gimple_call_arg (stmt, 2);
	  if (!tree_fits_uhwi_p (misalign))
	    return ptrval;
	  misaligni = tree_to_uhwi (misalign);
	}
    }
  else
    {
      /* Get aligni and misaligni from assume_aligned or
	 alloc_align attributes.  */
      if (TREE_VALUE (attr) == NULL_TREE)
	return ptrval;
      attr = TREE_VALUE (attr);
      align = TREE_VALUE (attr);
      if (!tree_fits_uhwi_p (align))
	return ptrval;
      aligni = tree_to_uhwi (align);
      if (alloc_aligned)
	{
	  if (aligni == 0 || aligni > gimple_call_num_args (stmt))
	    return ptrval;
	  align = gimple_call_arg (stmt, aligni - 1);
	  if (!tree_fits_uhwi_p (align))
	    return ptrval;
	  aligni = tree_to_uhwi (align);
	}
      else if (TREE_CHAIN (attr) && TREE_VALUE (TREE_CHAIN (attr)))
	{
	  misalign = TREE_VALUE (TREE_CHAIN (attr));
	  if (!tree_fits_uhwi_p (misalign))
	    return ptrval;
	  misaligni = tree_to_uhwi (misalign);
	}
    }
  if (aligni <= 1 || (aligni & (aligni - 1)) != 0 || misaligni >= aligni)
    return ptrval;

  align = build_int_cst_type (type, -aligni);
  alignval = get_value_for_expr (align, true);
  bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
		     type, value_to_wide_int (ptrval), ptrval.mask,
		     type, value_to_wide_int (alignval), alignval.mask);
  if (mask != -1)
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      gcc_assert ((mask.to_uhwi () & (aligni - 1)) == 0);
      gcc_assert ((value.to_uhwi () & (aligni - 1)) == 0);
      value |= misaligni;
      /* ??? Delay building trees here.  */
      val.value = wide_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
    }
  return val;
}
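
/* Usage sketch (illustrative, not from the sources): for a call

	p_2 = __builtin_assume_aligned (p_1, 16, 4);

   the pointer value is ANDed with -16 at the bit-lattice level and
   the misalignment 4 is ORed back in, so p_2 is known to satisfy
   p_2 % 16 == 4 even when nothing is known about p_1.  */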
/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches. */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant = false;
  unsigned int align;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:;
	}
      fprintf (dump_file, "\n");
    }

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    {
      fold_defer_overflow_warnings ();
      simplified = ccp_fold (stmt);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      fold_undefer_overflow_warnings (is_constant, stmt, 0);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = 0;
	}
    }
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = 0;
	}
    }

  /* Resort to simplification for bitwise tracking.  */
  if (flag_tree_bit_ccp
      && (likelyvalue == CONSTANT || is_gimple_call (stmt))
      && !is_constant)
    {
      enum gimple_code code = gimple_code (stmt);
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  switch (get_gimple_rhs_class (subcode))
	    {
	    case GIMPLE_SINGLE_RHS:
	      if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		val = get_value_for_expr (rhs1, true);
	      break;

	    case GIMPLE_UNARY_RHS:
	      if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		   || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		  && (INTEGRAL_TYPE_P (gimple_expr_type (stmt))
		      || POINTER_TYPE_P (gimple_expr_type (stmt))))
		val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1);
	      break;

	    case GIMPLE_BINARY_RHS:
	      if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  tree rhs2 = gimple_assign_rhs2 (stmt);
		  val = bit_value_binop (subcode,
					 TREE_TYPE (lhs), rhs1, rhs2);
		}
	      break;

	    default:;
	    }
	}
      else if (code == GIMPLE_COND)
	{
	  enum tree_code code = gimple_cond_code (stmt);
	  tree rhs1 = gimple_cond_lhs (stmt);
	  tree rhs2 = gimple_cond_rhs (stmt);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
	      || POINTER_TYPE_P (TREE_TYPE (rhs1)))
	    val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
	}
      else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_REALLOC:
	    case BUILT_IN_CALLOC:
	    case BUILT_IN_STRDUP:
	    case BUILT_IN_STRNDUP:
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = ~((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT
			   / BITS_PER_UNIT - 1);
	      break;

	    case BUILT_IN_ALLOCA:
	    case BUILT_IN_ALLOCA_WITH_ALIGN:
	      align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN
		       ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1))
		       : BIGGEST_ALIGNMENT);
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = ~((HOST_WIDE_INT) align / BITS_PER_UNIT - 1);
	      break;

	    /* These builtins return their first argument, unmodified.  */
	    case BUILT_IN_MEMCPY:
	    case BUILT_IN_MEMMOVE:
	    case BUILT_IN_MEMSET:
	    case BUILT_IN_STRCPY:
	    case BUILT_IN_STRNCPY:
	    case BUILT_IN_MEMCPY_CHK:
	    case BUILT_IN_MEMMOVE_CHK:
	    case BUILT_IN_MEMSET_CHK:
	    case BUILT_IN_STRCPY_CHK:
	    case BUILT_IN_STRNCPY_CHK:
	      val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
	      break;

	    case BUILT_IN_ASSUME_ALIGNED:
	      val = bit_value_assume_aligned (stmt, NULL_TREE, val, false);
	      break;

	    case BUILT_IN_ALIGNED_ALLOC:
	      {
		tree align = get_constant_value (gimple_call_arg (stmt, 0));
		if (align
		    && tree_fits_uhwi_p (align))
		  {
		    unsigned HOST_WIDE_INT aligni = tree_to_uhwi (align);
		    if (aligni > 1
			/* align must be power-of-two */
			&& (aligni & (aligni - 1)) == 0)
		      {
			val.lattice_val = CONSTANT;
			val.value = build_int_cst (ptr_type_node, 0);
			val.mask = -aligni;
		      }
		  }
		break;
	      }

	    default:;
	    }
	}
      if (is_gimple_call (stmt) && gimple_call_lhs (stmt))
	{
	  tree fntype = gimple_call_fntype (stmt);
	  if (fntype)
	    {
	      tree attrs = lookup_attribute ("assume_aligned",
					     TYPE_ATTRIBUTES (fntype));
	      if (attrs)
		val = bit_value_assume_aligned (stmt, attrs, val, false);
	      attrs = lookup_attribute ("alloc_align",
					TYPE_ATTRIBUTES (fntype));
	      if (attrs)
		val = bit_value_assume_aligned (stmt, attrs, val, true);
	    }
	}
      is_constant = (val.lattice_val == CONSTANT);
    }

  if (flag_tree_bit_ccp
      && ((is_constant && TREE_CODE (val.value) == INTEGER_CST)
	  || (!is_constant && likelyvalue != UNDEFINED))
      && gimple_get_lhs (stmt)
      && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME)
    {
      tree lhs = gimple_get_lhs (stmt);
      wide_int nonzero_bits = get_nonzero_bits (lhs);
      if (nonzero_bits != -1)
	{
	  if (!is_constant)
	    {
	      val.lattice_val = CONSTANT;
	      val.value = build_zero_cst (TREE_TYPE (lhs));
	      val.mask = extend_mask (nonzero_bits);
	      is_constant = true;
	    }
	  else
	    {
	      if (wi::bit_and_not (val.value, nonzero_bits) != 0)
		val.value = wide_int_to_tree (TREE_TYPE (lhs),
					      nonzero_bits & val.value);
	      if (nonzero_bits == 0)
		val.mask = 0;
	      else
		val.mask = val.mask & extend_mask (nonzero_bits);
	    }
	}
    }

  if (!is_constant)
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	{
	  val.lattice_val = likelyvalue;
	  val.mask = 0;
	}
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = -1;
	}

      val.value = NULL_TREE;
    }

  return val;
}
typedef hash_table <pointer_hash <gimple_statement_base> > gimple_htab;

/* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
   each matching BUILT_IN_STACK_RESTORE.  Mark visited phis in VISITED.  */

static void
insert_clobber_before_stack_restore (tree saved_val, tree var,
				     gimple_htab *visited)
{
  gimple stmt, clobber_stmt;
  tree clobber;
  imm_use_iterator iter;
  gimple_stmt_iterator i;
  gimple *slot;

  FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
    if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
      {
	clobber = build_constructor (TREE_TYPE (var),
				     NULL);
	TREE_THIS_VOLATILE (clobber) = 1;
	clobber_stmt = gimple_build_assign (var, clobber);

	i = gsi_for_stmt (stmt);
	gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
      }
    else if (gimple_code (stmt) == GIMPLE_PHI)
      {
	if (!visited->is_created ())
	  visited->create (10);

	slot = visited->find_slot (stmt, INSERT);
	if (*slot != NULL)
	  continue;

	*slot = stmt;
	insert_clobber_before_stack_restore (gimple_phi_result (stmt), var,
					     visited);
      }
    else if (gimple_assign_ssa_name_copy_p (stmt))
      insert_clobber_before_stack_restore (gimple_assign_lhs (stmt), var,
					   visited);
    else
      gcc_assert (is_gimple_debug (stmt));
}
/* Advance the iterator to the previous non-debug gimple statement in the same
   or dominating basic block.  */

static inline void
gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
{
  basic_block dom;

  gsi_prev_nondebug (i);
  while (gsi_end_p (*i))
    {
      dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
      if (dom == NULL || dom == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	return;

      *i = gsi_last_bb (dom);
    }
}
/* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
   a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.

   It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator
   when a previous pass (such as DOM) duplicated it along multiple paths to
   a BB.  In that case the function gives up without inserting the clobbers.  */

static void
insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
{
  gimple stmt;
  tree saved_val;
  gimple_htab visited;

  for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
    {
      stmt = gsi_stmt (i);

      if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
	continue;

      saved_val = gimple_call_lhs (stmt);
      if (saved_val == NULL_TREE)
	continue;

      insert_clobber_before_stack_restore (saved_val, var, &visited);
      break;
    }

  if (visited.is_created ())
    visited.dispose ();
}
/* Detects a __builtin_alloca_with_align with constant size argument.  Declares
   fixed-size array and returns the address, if found, otherwise returns
   NULL_TREE.  */

static tree
fold_builtin_alloca_with_align (gimple stmt)
{
  unsigned HOST_WIDE_INT size, threshold, n_elem;
  tree lhs, arg, block, var, elem_type, array_type;

  /* Get lhs.  */
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return NULL_TREE;

  /* Detect constant argument.  */
  arg = get_constant_value (gimple_call_arg (stmt, 0));
  if (arg == NULL_TREE
      || TREE_CODE (arg) != INTEGER_CST
      || !tree_fits_uhwi_p (arg))
    return NULL_TREE;

  size = tree_to_uhwi (arg);

  /* Heuristic: don't fold large allocas.  */
  threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
  /* In case the alloca is located at function entry, it has the same lifetime
     as a declared array, so we allow a larger size.  */
  block = gimple_block (stmt);
  if (!(cfun->after_inlining
	&& TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
    threshold /= 10;
  if (size > threshold)
    return NULL_TREE;

  /* Declare array.  */
  elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
  n_elem = size * 8 / BITS_PER_UNIT;
  array_type = build_array_type_nelts (elem_type, n_elem);
  var = create_tmp_var (array_type, NULL);
  DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1));
  {
    struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
    if (pi != NULL && !pi->pt.anything)
      {
	bool singleton_p;
	unsigned uid;
	singleton_p = pt_solution_singleton_p (&pi->pt, &uid);
	gcc_assert (singleton_p);
	SET_DECL_PT_UID (var, uid);
      }
  }

  /* Fold alloca to the address of the array.  */
  return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
}
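
/* Transformation sketch (illustrative): a call

	p_1 = __builtin_alloca_with_align (16, 64);

   becomes the address of a fresh 16-byte local array with 8-byte
   alignment, i.e. effectively

	unsigned char D.1234[16];
	p_1 = &D.1234;

   which later passes can keep in the stack frame instead of
   adjusting the stack pointer at run time.  */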
/* Fold the stmt at *GSI with CCP specific information that propagating
   and regular folding does not catch.  */

static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	prop_value_t val;
	/* Statement evaluation will handle type mismatches in constants
	   more gracefully than the final propagation.  This allows us to
	   fold more conditionals here.  */
	val = evaluate_stmt (stmt);
	if (val.lattice_val != CONSTANT
	    || val.mask != 0)
	  return false;

	if (dump_file)
	  {
	    fprintf (dump_file, "Folding predicate ");
	    print_gimple_expr (dump_file, stmt, 0, 0);
	    fprintf (dump_file, " to ");
	    print_generic_expr (dump_file, val.value, 0);
	    fprintf (dump_file, "\n");
	  }

	if (integer_zerop (val.value))
	  gimple_cond_make_false (stmt);
	else
	  gimple_cond_make_true (stmt);

	return true;
      }

    case GIMPLE_CALL:
      {
	tree lhs = gimple_call_lhs (stmt);
	int flags = gimple_call_flags (stmt);
	tree val;
	tree argt;
	bool changed = false;
	unsigned i;

	/* If the call was folded into a constant make sure it goes
	   away even if we cannot propagate into all uses because of
	   type issues.  */
	if (lhs
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs))
	    /* Don't optimize away calls that have side-effects.  */
	    && (flags & (ECF_CONST|ECF_PURE)) != 0
	    && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
	  {
	    tree new_rhs = unshare_expr (val);
	    bool res;
	    if (!useless_type_conversion_p (TREE_TYPE (lhs),
					    TREE_TYPE (new_rhs)))
	      new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	    res = update_call_from_tree (gsi, new_rhs);
	    gcc_assert (res);
	    return true;
	  }

	/* Internal calls provide no argument types, so the extra laxity
	   for normal calls does not apply.  */
	if (gimple_call_internal_p (stmt))
	  return false;

	/* The heuristic of fold_builtin_alloca_with_align differs before and
	   after inlining, so we don't require the arg to be changed into a
	   constant for folding, but just to be constant.  */
	if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
	  {
	    tree new_rhs = fold_builtin_alloca_with_align (stmt);
	    if (new_rhs)
	      {
		bool res = update_call_from_tree (gsi, new_rhs);
		tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0),0);
		gcc_assert (res);
		insert_clobbers_for_var (*gsi, var);
		return true;
	      }
	  }

	/* Propagate into the call arguments.  Compared to replace_uses_in
	   this can use the argument slot types for type verification
	   instead of the current argument type.  We also can safely
	   drop qualifiers here as we are dealing with constants anyway.  */
	argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
	for (i = 0; i < gimple_call_num_args (stmt) && argt;
	     ++i, argt = TREE_CHAIN (argt))
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (TREE_CODE (arg) == SSA_NAME
		&& (val = get_constant_value (arg))
		&& useless_type_conversion_p
		     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
		      TYPE_MAIN_VARIANT (TREE_TYPE (val))))
	      {
		gimple_call_set_arg (stmt, i, unshare_expr (val));
		changed = true;
	      }
	  }

	return changed;
      }

    case GIMPLE_ASSIGN:
      {
	tree lhs = gimple_assign_lhs (stmt);
	tree val;

	/* If we have a load that turned out to be constant replace it
	   as we cannot propagate into all uses in all cases.  */
	if (gimple_assign_single_p (stmt)
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs)))
	  {
	    tree rhs = unshare_expr (val);
	    if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	      rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
	    gimple_assign_set_rhs_from_tree (gsi, rhs);
	    return true;
	  }

	return false;
      }

    default:
      return false;
    }
}
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_single_p (stmt)
      && gimple_assign_rhs_code (stmt) == SSA_NAME)
    /* For a simple copy operation, we copy the lattice values.  */
    val = *get_value (gimple_assign_rhs1 (stmt));
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}
/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);
  if (val.lattice_val != CONSTANT
      || val.mask != 0)
    return SSA_PROP_VARYING;

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = find_taken_edge (block, val.value);
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}

/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
	 output value, evaluate its RHS to see if the lattice value of
	 its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
	return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
	 which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
	 computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE, -1 };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}
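
/* Illustrative sketch, not part of the pass (names are hypothetical):
   a definition CCP cannot reason about, such as an SSA name set by a
   GIMPLE_ASM, falls through the switch above and is pinned to VARYING,
   so every statement using that name sees a non-constant operand.  */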

/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  unsigned int todo = 0;
  calculate_dominance_info (CDI_DOMINATORS);
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    todo = (TODO_cleanup_cfg | TODO_update_ssa);
  free_dominance_info (CDI_DOMINATORS);
  return todo;
}

namespace {

const pass_data pass_data_ccp =
{
  GIMPLE_PASS, /* type */
  "ccp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_TREE_CCP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

class pass_ccp : public gimple_opt_pass
{
public:
  pass_ccp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_ccp, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_ccp (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_ccp != 0; }
  virtual unsigned int execute (function *) { return do_ssa_ccp (); }

}; // class pass_ccp

} // anon namespace

gimple_opt_pass *
make_pass_ccp (gcc::context *ctxt)
{
  return new pass_ccp (ctxt);
}

/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple stmt;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
	  || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	  /* All regular builtins are ok, just obviously not alloca.  */
	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	goto second_stack_restore;
    }

  if (!gsi_end_p (i))
    return NULL_TREE;

  /* Allow one successor of the exit block, or zero successors.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }

 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
	{
	  callee = gimple_call_fndecl (stack_save);
	  if (callee
	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
	    {
	      gimple_stmt_iterator stack_save_gsi;
	      tree rhs;

	      stack_save_gsi = gsi_for_stmt (stack_save);
	      rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
	      update_call_from_tree (&stack_save_gsi, rhs);
	    }
	}
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
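
/* Illustrative sketch, not part of the pass (names are hypothetical):
   code using a variable-length array expands roughly to

	save_1 = __builtin_stack_save ();
	... use of the VLA, no intervening calls ...
	__builtin_stack_restore (save_1);

   If this restore is followed in the same block only by another
   restore, or the block falls through to the function exit, the call
   is deleted; when save_1 then has no other uses, the matching
   __builtin_stack_save is replaced by a null constant as well.  */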

/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
				 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
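
/* Illustrative sketch, not part of the pass: on a target whose
   va_list is a plain void* or char* pointer, the stdarg builtins
   degenerate into pointer operations:

	__builtin_va_start (&ap, 0)	->	ap = __builtin_next_arg (0)
	__builtin_va_copy (&dst, src)	->	dst = src
	__builtin_va_end (&ap)		->	(deleted)  */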

/* Attempt to make the block of __builtin_unreachable I unreachable by changing
   the incoming jumps.  Return true if at least one jump was changed.  */

static bool
optimize_unreachable (gimple_stmt_iterator i)
{
  basic_block bb = gsi_bb (i);
  gimple_stmt_iterator gsi;
  gimple stmt;
  edge_iterator ei;
  edge e;
  bool ret;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      if (gimple_code (stmt) == GIMPLE_LABEL)
	{
	  /* Verify we do not need to preserve the label.  */
	  if (FORCED_LABEL (gimple_label_label (stmt)))
	    return false;

	  continue;
	}

      /* Only handle the case that __builtin_unreachable is the first statement
	 in the block.  We rely on DCE to remove stmts without side-effects
	 before __builtin_unreachable.  */
      if (gsi_stmt (gsi) != gsi_stmt (i))
	return false;
    }

  ret = false;
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      if (gsi_end_p (gsi))
	continue;

      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_COND)
	{
	  if (e->flags & EDGE_TRUE_VALUE)
	    gimple_cond_make_false (stmt);
	  else if (e->flags & EDGE_FALSE_VALUE)
	    gimple_cond_make_true (stmt);
	  else
	    gcc_unreachable ();
	  update_stmt (stmt);
	}
      else
	{
	  /* Todo: handle other cases, f.i. switch statement.  */
	  continue;
	}

      ret = true;
    }

  return ret;
}
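
/* Illustrative sketch, not part of the pass (names and block numbers
   are hypothetical): given

	if (x_1 > 10) goto <bb 3>; else goto <bb 4>;
	...
	<bb 3>:
	__builtin_unreachable ();

   the true edge leads only to __builtin_unreachable, so the condition
   is rewritten to constant false and CFG cleanup later removes bb 3.  */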

/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

namespace {

const pass_data pass_data_fold_builtins =
{
  GIMPLE_PASS, /* type */
  "fab", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_fold_builtins : public gimple_opt_pass
{
public:
  pass_fold_builtins (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fold_builtins, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fold_builtins (m_ctxt); }
  virtual unsigned int execute (function *);

}; // class pass_fold_builtins

unsigned int
pass_fold_builtins::execute (function *fun)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple stmt, old_stmt;
	  tree callee, result;
	  enum built_in_function fcode;

	  stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      /* Remove all *ssaname_N ={v} {CLOBBER}; stmts,
		 after the last GIMPLE DSE they aren't needed and might
		 unnecessarily keep the SSA_NAMEs live.  */
	      if (gimple_clobber_p (stmt))
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  if (TREE_CODE (lhs) == MEM_REF
		      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
		    {
		      unlink_stmt_vdef (stmt);
		      gsi_remove (&i, true);
		      release_defs (stmt);
		      continue;
		    }
		}
	      gsi_next (&i);
	      continue;
	    }
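
	  /* Illustrative sketch, not part of the pass (the name is
	     hypothetical): the statements removed above are
	     end-of-scope clobbers of indirectly referenced storage,
	     e.g.

		*ptr_5 ={v} {CLOBBER};

	     Dropping them after the last DSE run lets ptr_5 die
	     earlier.  */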

	  callee = gimple_call_fndecl (stmt);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      gsi_next (&i);
	      continue;
	    }

	  fcode = DECL_FUNCTION_CODE (callee);

	  result = gimple_fold_builtin (stmt);

	  if (result)
	    gimple_remove_stmt_histograms (fun, stmt);

	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
		result = integer_zero_node;
		break;

	      case BUILT_IN_ASSUME_ALIGNED:
		/* Remove __builtin_assume_aligned.  */
		result = gimple_call_arg (stmt, 0);
		break;

	      case BUILT_IN_STACK_RESTORE:
		result = optimize_stack_restore (i);
		if (result)
		  break;
		gsi_next (&i);
		continue;

	      case BUILT_IN_UNREACHABLE:
		if (optimize_unreachable (i))
		  cfg_changed = true;
		break;

	      case BUILT_IN_VA_START:
	      case BUILT_IN_VA_END:
	      case BUILT_IN_VA_COPY:
		/* These shouldn't be folded before pass_stdarg.  */
		result = optimize_stdarg_builtin (stmt);
		if (result)
		  break;
		/* FALLTHRU */

	      default:
		gsi_next (&i);
		continue;
	      }
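
	  /* Illustrative sketch, not part of the pass (names are
	     hypothetical): a __builtin_constant_p (x_3) that survived
	     to this point could not be proven constant by any earlier
	     folding, so it is resolved to 0 above, and
	     __builtin_assume_aligned (p_7, 16) is simply replaced by
	     p_7 once its alignment hint has been consumed.  */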

	  if (result == NULL_TREE)
	    break;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  old_stmt = stmt;
	  if (!update_call_from_tree (&i, result))
	    {
	      gimplify_and_update_call_from_tree (&i, result);
	      todoflags |= TODO_update_address_taken;
	    }

	  stmt = gsi_stmt (i);
	  update_stmt (stmt);

	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin, there might be new opportunities now.  */
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    gsi_next (&i);
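
	  /* Illustrative sketch, not part of the pass: if folding
	     turned, say, a __builtin_strcpy call into a
	     __builtin_memcpy call, FCODE differs from the new function
	     code, the iterator is not advanced, and the new builtin is
	     considered again on the next iteration.  */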
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}

} // anon namespace

gimple_opt_pass *
make_pass_fold_builtins (gcc::context *ctxt)
{
  return new pass_fold_builtins (ctxt);
}