/* Statement simplification on GIMPLE.
   Copyright (C) 2010 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "target.h"
/* Return true when DECL is static object in other partition.
   In that case we must prevent folding as we can't refer to
   the symbol.

   We can get into it in two ways:

   1) When analyzing C++ virtual tables.
      C++ virtual tables do have known constructors even
      when they are keyed to other compilation unit.
      Those tables can contain pointers to methods and vars
      in other units.  Those methods have both STATIC and EXTERNAL
      set.
   2) In WHOPR mode devirtualization might lead to reference
      to method that was partitioned elsewhere.
      In this case we have static VAR_DECL or FUNCTION_DECL
      that has no corresponding callgraph/varpool node
      declaring the body.  */
static bool
static_object_in_other_unit_p (tree decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;

  if (!TREE_STATIC (decl)
      || TREE_PUBLIC (decl) || DECL_COMDAT (decl))
    return false;
  /* External flag is set, so we deal with C++ reference
     to static object from other file.  */
  if (DECL_EXTERNAL (decl))
    {
      /* Just be sure it is not a bug in the frontend setting
         flags incorrectly.  Those variables should never
         be finalized and compiled.  */
      gcc_checking_assert (!(vnode = varpool_get_node (decl))
                           || !vnode->finalized);
      return true;
    }
  /* We are not at ltrans stage; so don't worry about WHOPR.  */
  if (!flag_ltrans)
    return false;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      if (!node || !node->analyzed)
        return true;
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->finalized)
        return true;
    }
  return false;
}
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.  */

static tree
canonicalize_constructor_val (tree cval)
{
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR)
    {
      tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval),
                                             TREE_OPERAND (cval, 0),
                                             TREE_OPERAND (cval, 1),
                                             TREE_TYPE (cval));
      if (t)
        cval = t;
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (cval, 0));

      if (base
          && (TREE_CODE (base) == VAR_DECL
              || TREE_CODE (base) == FUNCTION_DECL)
          && static_object_in_other_unit_p (base))
        return NULL_TREE;
      if (base && TREE_CODE (base) == VAR_DECL)
        add_referenced_var (base);
    }
  return cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (const_value_known_p (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
        {
          val = canonicalize_constructor_val (val);
          if (val && is_gimple_min_invariant (val))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
              || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
        return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}
/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */

bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (TREE_CODE (deref) == MEM_REF
              && TREE_CODE (addr) == ADDR_EXPR);

  /* Don't propagate if ADDR's operand has incomplete type.  */
  if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
    return false;

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
          || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
                                      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
                                     TREE_TYPE (addr))
          && useless_type_conversion_p (TREE_TYPE (deref),
                                        TREE_TYPE (TREE_OPERAND (addr, 0))));
}
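
/* Illustrative sketch of the propagation this predicate guards (example
   inputs, not from the original sources): given

     int x;
     p_1 = &x;
     ... = MEM[p_1];

   substituting &x into the MEM_REF yields the direct load "x".  That
   replacement is valid only when the type checks above hold, e.g. the
   type of the dereference and the type of "x" must agree.  */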
/* A subroutine of fold_stmt.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.

   LOC is the location of the original expression.  */

static tree
maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of the element type) from that ARRAY_REF.
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
          || TREE_CODE (elt_offset) != INTEGER_CST)
        return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = ssizetype;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (!elt_size)
    return NULL_TREE;
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
        elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
         to sign-extend the (possibly pointer) offset here
         and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
                                 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
          || div_and_round_double (TRUNC_DIV_EXPR, 0,
                                   soffset.low, soffset.high,
                                   TREE_INT_CST_LOW (elt_size),
                                   TREE_INT_CST_HIGH (elt_size),
                                   &lquo, &hquo, &lrem, &hrem)
          || lrem || hrem)
        return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
        min_idx = TYPE_MIN_VALUE (idx_type);
      else
        min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
        return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds. For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.
     This is only an issue for multi-dimensional arrays.  */
  if (TREE_CODE (elt_type) == ARRAY_TYPE
      && domain_type)
    {
      if (TYPE_MAX_VALUE (domain_type)
          && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
          && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
        return NULL_TREE;
      else if (TYPE_MIN_VALUE (domain_type)
               && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
               && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
        return NULL_TREE;
      else if (compare_tree_int (idx, 0) < 0)
        return NULL_TREE;
    }

  {
    tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
    SET_EXPR_LOCATION (t, loc);
    return t;
  }
}
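
/* Worked example for the folding above (illustrative; assumes a 4-byte
   element type):

     int a[8];
     *(&a[1] + 8)   -->   a[3]

   The byte offset 8 divides exactly by the element size 4, giving an
   element index of 2, to which the existing ARRAY_REF index 1 is added.
   An inexact division, e.g. a byte offset of 6, makes the function
   return NULL_TREE instead.  */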
/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
   LOC is the location of original expression.

   Before attempting the conversion strip off existing ADDR_EXPRs.  */

tree
maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
                                tree orig_type)
{
  tree ret;

  STRIP_NOPS (base);
  if (TREE_CODE (base) != ADDR_EXPR)
    return NULL_TREE;

  base = TREE_OPERAND (base, 0);
  if (types_compatible_p (orig_type, TREE_TYPE (base))
      && integer_zerop (offset))
    return base;

  ret = maybe_fold_offset_to_array_ref (loc, base, offset);
  if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
    return ret;
  return NULL_TREE;
}
/* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
   LOC is the location of the original expression.  */

tree
maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
                              tree orig_type)
{
  tree base, ret;

  STRIP_NOPS (addr);
  if (TREE_CODE (addr) != ADDR_EXPR)
    return NULL_TREE;
  base = TREE_OPERAND (addr, 0);
  ret = maybe_fold_offset_to_array_ref (loc, base, offset);
  if (ret)
    {
      ret = build_fold_addr_expr (ret);
      if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
        return NULL_TREE;
      SET_EXPR_LOCATION (ret, loc);
    }

  return ret;
}
/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
        (T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
        &array[x]
   which may be able to propagate further.  */

tree
maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    {
      /* Or op0 should now be A[0] and the non-constant offset defined
         via a multiplication by the array element size.  */
      if (TREE_CODE (op0) == ARRAY_REF
          /* As we will end up creating a variable index array access
             in the outermost array dimension make sure there isn't
             a more inner array that the index could overflow to.  */
          && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
          && integer_zerop (TREE_OPERAND (op0, 1))
          && TREE_CODE (op1) == SSA_NAME)
        {
          gimple offset_def = SSA_NAME_DEF_STMT (op1);
          tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
          if (!host_integerp (elsz, 1)
              || !is_gimple_assign (offset_def))
            return NULL_TREE;

          /* Do not build array references of something that we can't
             see the true number of array dimensions for.  */
          if (!DECL_P (TREE_OPERAND (op0, 0))
              && !handled_component_p (TREE_OPERAND (op0, 0)))
            return NULL_TREE;

          if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
              && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
              && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
            return build_fold_addr_expr
                          (build4 (ARRAY_REF, TREE_TYPE (op0),
                                   TREE_OPERAND (op0, 0),
                                   gimple_assign_rhs1 (offset_def),
                                   TREE_OPERAND (op0, 2),
                                   TREE_OPERAND (op0, 3)));
          else if (integer_onep (elsz)
                   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
            return build_fold_addr_expr
                          (build4 (ARRAY_REF, TREE_TYPE (op0),
                                   TREE_OPERAND (op0, 0),
                                   op1,
                                   TREE_OPERAND (op0, 2),
                                   TREE_OPERAND (op0, 3)));
        }
      else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
               /* Ditto.  */
               && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
               && TREE_CODE (op1) == SSA_NAME)
        {
          gimple offset_def = SSA_NAME_DEF_STMT (op1);
          tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
          if (!host_integerp (elsz, 1)
              || !is_gimple_assign (offset_def))
            return NULL_TREE;

          /* Do not build array references of something that we can't
             see the true number of array dimensions for.  */
          if (!DECL_P (op0)
              && !handled_component_p (op0))
            return NULL_TREE;

          if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
              && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
              && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
            return build_fold_addr_expr
                          (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
                                   op0, gimple_assign_rhs1 (offset_def),
                                   integer_zero_node, NULL_TREE));
          else if (integer_onep (elsz)
                   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
            return build_fold_addr_expr
                          (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
                                   op0, op1,
                                   integer_zero_node, NULL_TREE));
        }

      return NULL_TREE;
    }

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
        break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
        break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
        {
          min_idx = TYPE_MIN_VALUE (min_idx);
          if (min_idx)
            {
              if (TREE_CODE (min_idx) != INTEGER_CST)
                break;

              array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
              if (!integer_zerop (min_idx))
                array_idx = int_const_binop (MINUS_EXPR, array_idx,
                                             min_idx, 0);
            }
        }

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
                             array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (loc, op0, op1);
  if (t)
    {
      t = build_fold_addr_expr (t);
      if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
        return NULL_TREE;
      SET_EXPR_LOCATION (t, loc);
    }

  return t;
}
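
/* Illustrative examples of the POINTER_PLUS folding above (types and
   SSA names are invented for the sketch; assumes 4-byte int):

     int a[4];
     (int *)(&a + 8)    -->   &a[2]

   and, for the variable-index case, with i_2 = j_1 * 4:

     &a[0] + i_2        -->   &a[j_1]

   since the multiplier matches the element size.  */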
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* ???  We might want to open-code the relevant remaining cases
     to avoid using the generic fold.  */
  if (handled_component_p (*t)
      && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
    {
      tree tem = fold (*t);
      if (tem != *t)
        return tem;
    }

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
          == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
          == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
         from the rhs to the lhs if is_lhs, but we don't have the
         rhs here to verify that.  Thus require strict type
         compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
                             TREE_TYPE (TREE_OPERAND
                                          (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
        return tem;
      return expr;
    }
  /* Canonicalize MEM_REFs invariant address operand.  */
  else if (TREE_CODE (*t) == MEM_REF
           && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
           && !DECL_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))
           && !CONSTANT_CLASS_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
    {
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
                              TREE_OPERAND (*t, 0),
                              TREE_OPERAND (*t, 1));
      if (tem)
        {
          *t = tem;
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
        {
          *t = tem;
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }
  else if (!is_lhs
           && DECL_P (*t))
    {
      tree tem = get_symbol_constant_value (*t);
      if (tem
          && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
        {
          *t = unshare_expr (tem);
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }

  return NULL_TREE;
}
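
/* Example of the MEM_REF folding above (illustrative):

     MEM[(int *)&x, 0]   -->   x

   provided the volatility, alias-set and type compatibility checks all
   pass; otherwise the reference is left alone.  */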
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        /* Try to fold a conditional expression.  */
        if (TREE_CODE (rhs) == COND_EXPR)
          {
            tree op0 = COND_EXPR_COND (rhs);
            tree tem;
            bool set = false;
            location_t cond_loc = EXPR_LOCATION (rhs);

            if (COMPARISON_CLASS_P (op0))
              {
                fold_defer_overflow_warnings ();
                tem = fold_binary_loc (cond_loc,
                                       TREE_CODE (op0), TREE_TYPE (op0),
                                       TREE_OPERAND (op0, 0),
                                       TREE_OPERAND (op0, 1));
                /* This is actually a conditional expression, not a GIMPLE
                   conditional statement, however, the valid_gimple_rhs_p
                   test still applies.  */
                set = (tem && is_gimple_condexpr (tem)
                       && valid_gimple_rhs_p (tem));
                fold_undefer_overflow_warnings (set, stmt, 0);
              }
            else if (is_gimple_min_invariant (op0))
              {
                tem = op0;
                set = true;
              }
            else
              return NULL_TREE;

            if (set)
              result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs),
                                        tem, COND_EXPR_THEN (rhs),
                                        COND_EXPR_ELSE (rhs));
          }

        else if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
            if (tem
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
            else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                 && (CONSTRUCTOR_NELTS (rhs)
                     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (TREE_CODE (val) != INTEGER_CST
                  && TREE_CODE (val) != REAL_CST
                  && TREE_CODE (val) != FIXED_CST)
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return unshare_expr (get_symbol_constant_value (rhs));

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;

        return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
        if (result)
          {
            /* If the operation was a conversion do _not_ mark a
               resulting constant with TREE_OVERFLOW if the original
               constant was not.  These conversions have implementation
               defined behavior and retaining the TREE_OVERFLOW flag
               here would confuse later passes such as VRP.  */
            if (CONVERT_EXPR_CODE_P (subcode)
                && TREE_CODE (result) == INTEGER_CST
                && TREE_CODE (rhs) == INTEGER_CST)
              TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

            STRIP_USELESS_TYPE_CONVERSION (result);
            if (valid_gimple_rhs_p (result))
              return result;
          }
        else if (CONVERT_EXPR_CODE_P (subcode)
                 && POINTER_TYPE_P (gimple_expr_type (stmt))
                 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
          {
            tree type = gimple_expr_type (stmt);
            tree t = maybe_fold_offset_to_address (loc,
                                                   gimple_assign_rhs1 (stmt),
                                                   integer_zero_node, type);
            if (t)
              return t;
          }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
        {
          tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
            {
              type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
              if (!useless_type_conversion_p
                    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
                type = TREE_TYPE (gimple_assign_rhs1 (stmt));
            }
          result = maybe_fold_stmt_addition (gimple_location (stmt),
                                             type,
                                             gimple_assign_rhs1 (stmt),
                                             gimple_assign_rhs2 (stmt));
        }

      if (!result)
        result = fold_binary_loc (loc, subcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;

          /* Fold might have produced non-GIMPLE, so if we trust it blindly
             we lose canonicalization opportunities.  Do not go again
             through fold here though, or the same non-GIMPLE will be
             produced.  */
          if (commutative_tree_code (subcode)
              && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                       gimple_assign_rhs2 (stmt), false))
            return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
                           gimple_assign_rhs2 (stmt),
                           gimple_assign_rhs1 (stmt));
        }
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
                                 TREE_TYPE (gimple_assign_lhs (stmt)),
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;

          /* Fold might have produced non-GIMPLE, so if we trust it blindly
             we lose canonicalization opportunities.  Do not go again
             through fold here though, or the same non-GIMPLE will be
             produced.  */
          if (commutative_ternary_tree_code (subcode)
              && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                       gimple_assign_rhs2 (stmt), false))
            return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
                           gimple_assign_rhs2 (stmt),
                           gimple_assign_rhs1 (stmt),
                           gimple_assign_rhs3 (stmt));
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
                                 gimple_cond_code (stmt),
                                 boolean_type_node,
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}
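
/* For instance, after constant propagation a condition such as
   "if (42 > 5)" folds to "if (1)" here; only the condition is
   rewritten, the CFG itself is left untouched.  */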
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE; /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;
  gimple last = NULL;
  gimple laststore = NULL;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);
  reaching_vuse = gimple_vuse (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    gimplify_and_add (expr, &stmts);
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      if (last)
        {
          gsi_insert_before (si_p, last, GSI_NEW_STMT);
          gsi_next (si_p);
        }
      new_stmt = gsi_stmt (i);
      if (gimple_in_ssa_p (cfun))
        {
          find_new_referenced_vars (new_stmt);
          mark_symbols_for_renaming (new_stmt);
        }
      /* If the new statement has a VUSE, update it with exact SSA name we
         know will reach this one.  */
      if (gimple_vuse (new_stmt))
        {
          /* If we've also seen a previous store create a new VDEF for
             the latter one, and make that the new reaching VUSE.  */
          if (laststore)
            {
              reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
              gimple_set_vdef (laststore, reaching_vuse);
              update_stmt (laststore);
              laststore = NULL;
            }
          gimple_set_vuse (new_stmt, reaching_vuse);
          gimple_set_modified (new_stmt, true);
        }
      if (gimple_assign_single_p (new_stmt)
          && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
        {
          laststore = new_stmt;
        }
      last = new_stmt;
    }

  if (lhs == NULL_TREE)
    {
      /* If we replace a call without LHS that has a VDEF and our new
         sequence ends with a store we must make that store have the same
         vdef in order not to break the sequencing.  This can happen
         for instance when folding memcpy calls into assignments.  */
      if (gimple_vdef (stmt) && laststore)
        {
          gimple_set_vdef (laststore, gimple_vdef (stmt));
          if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
            SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
          update_stmt (laststore);
        }
      else if (gimple_in_ssa_p (cfun))
        {
          unlink_stmt_vdef (stmt);
          release_defs (stmt);
        }

      new_stmt = last;
    }
  else
    {
      if (last)
        {
          gsi_insert_before (si_p, last, GSI_NEW_STMT);
          gsi_next (si_p);
        }
      if (laststore && is_gimple_reg (lhs))
        {
          gimple_set_vdef (laststore, gimple_vdef (stmt));
          update_stmt (laststore);
          if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
            SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
          laststore = NULL;
        }
      else if (laststore)
        {
          reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
          gimple_set_vdef (laststore, reaching_vuse);
          update_stmt (laststore);
          laststore = NULL;
        }

      new_stmt = gimple_build_assign (lhs, tmp);
      if (!is_gimple_reg (tmp))
        gimple_set_vuse (new_stmt, reaching_vuse);
      if (!is_gimple_reg (lhs))
        {
          gimple_set_vdef (new_stmt, gimple_vdef (stmt));
          if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
            SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
        }
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
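
/* A sketch of the VDEF bookkeeping above (virtual SSA names invented
   for the example): when replacing

     # .MEM_3 = VDEF <.MEM_2>
     memcpy (&a, &b, 8);

   with the assignment sequence produced by gimplification, .MEM_2
   remains the first VUSE and .MEM_3 the last VDEF of the sequence,
   and fresh virtual names are created only for stores in between.  */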
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
        return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
               && get_maxval_strlen (COND_EXPR_ELSE (arg), length,
                                     visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
               && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
        /* The RHS of the statement defining VAR must either have a
           constant length or come from another SSA_NAME with a constant
           length.  */
        if (gimple_assign_single_p (def_stmt)
            || gimple_assign_unary_nop_p (def_stmt))
          {
            tree rhs = gimple_assign_rhs1 (def_stmt);
            return get_maxval_strlen (rhs, length, visited, type);
          }
        return false;

      case GIMPLE_PHI:
        {
          /* All the arguments of the PHI node must have the same constant
             length.  */
          unsigned i;

          for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
            {
              tree arg = gimple_phi_arg (def_stmt, i)->def;

              /* If this PHI has itself as an argument, we cannot
                 determine the string length of this argument.  However,
                 if we can find a constant string length for the other
                 PHI args then we can still be sure that this is a
                 constant string length.  So be optimistic and just
                 continue with the next argument.  */
              if (arg == gimple_phi_result (def_stmt))
                continue;

              if (!get_maxval_strlen (arg, length, visited, type))
                return false;
            }
        }
        return true;

      default:
        return false;
    }
}
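
/* Usage sketch (hypothetical caller): to ask for the string length of
   an argument one would write

     bitmap visited = BITMAP_ALLOC (NULL);
     tree len = NULL_TREE;
     if (get_maxval_strlen (arg, &len, visited, 0))
       ... use len ...
     BITMAP_FREE (visited);

   which is essentially what gimple_fold_builtin below does.  */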
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
        {
          tree new_val =
              fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we cannot use the result.  */
          if (is_gimple_val (new_val)
              || (is_gimple_cast (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))
            return new_val;
        }
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (loc, callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
                                      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (loc, callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),
                                       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_memory_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
                                          val[2], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_stxcpy_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          val[1], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
                                           val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
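
/* For example, with the length of "hello" known to be 5, a call

     strcpy (buf, "hello");

   can be rewritten via fold_builtin_strcpy into the equivalent

     memcpy (buf, "hello", 6);

   copying the terminating NUL as part of a known-size block move.  */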
/* Return the first of the base binfos of BINFO that has virtual functions.  */

static tree
get_first_base_binfo_with_virtuals (tree binfo)
{
  int i;
  tree base_binfo;

  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    if (BINFO_VIRTUALS (base_binfo))
      return base_binfo;

  return NULL_TREE;
}


/* Search for a base binfo of BINFO that corresponds to TYPE and return it if
   it is found or NULL_TREE if it is not.  */

static tree
get_base_binfo_for_type (tree binfo, tree type)
{
  int i;
  tree base_binfo;
  tree res = NULL_TREE;

  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    if (TREE_TYPE (base_binfo) == type)
      {
        gcc_assert (!res);
        res = base_binfo;
      }

  return res;
}
/* Return a binfo describing the part of object referenced by expression REF.
   Return NULL_TREE if it cannot be determined.  REF can consist of a series of
   COMPONENT_REFs of a declaration or of an INDIRECT_REF or it can also be just
   a simple declaration, indirect reference or an SSA_NAME.  If the function
   discovers an INDIRECT_REF or an SSA_NAME, it will assume that the
   encapsulating type is described by KNOWN_BINFO, if it is not NULL_TREE.
   Otherwise the first non-artificial field declaration or the base declaration
   will be examined to get the encapsulating type.  */

tree
gimple_get_relevant_ref_binfo (tree ref, tree known_binfo)
{
  while (true)
    {
      if (TREE_CODE (ref) == COMPONENT_REF)
        {
          tree par_type;
          tree binfo, base_binfo;
          tree field = TREE_OPERAND (ref, 1);

          if (!DECL_ARTIFICIAL (field))
            {
              tree type = TREE_TYPE (field);
              if (TREE_CODE (type) == RECORD_TYPE)
                return TYPE_BINFO (type);
              else
                return NULL_TREE;
            }

          par_type = TREE_TYPE (TREE_OPERAND (ref, 0));
          binfo = TYPE_BINFO (par_type);
          if (!binfo
              || BINFO_N_BASE_BINFOS (binfo) == 0)
            return NULL_TREE;

          base_binfo = get_first_base_binfo_with_virtuals (binfo);
          if (base_binfo && BINFO_TYPE (base_binfo) != TREE_TYPE (field))
            {
              tree d_binfo;

              d_binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (ref, 0),
                                                       known_binfo);
              /* Get descendant binfo.  */
              if (!d_binfo)
                return NULL_TREE;
              return get_base_binfo_for_type (d_binfo, TREE_TYPE (field));
            }

          ref = TREE_OPERAND (ref, 0);
        }
      else if (DECL_P (ref) && TREE_CODE (TREE_TYPE (ref)) == RECORD_TYPE)
        return TYPE_BINFO (TREE_TYPE (ref));
      else if (known_binfo
               && (TREE_CODE (ref) == SSA_NAME
                   || TREE_CODE (ref) == MEM_REF))
        return known_binfo;
      else
        return NULL_TREE;
    }
}
/* Fold an OBJ_TYPE_REF expression to the address of a function.  TOKEN is
   integer form of OBJ_TYPE_REF_TOKEN of the reference expression.  KNOWN_BINFO
   carries the binfo describing the true type of OBJ_TYPE_REF_OBJECT(REF).  */

tree
gimple_fold_obj_type_ref_known_binfo (HOST_WIDE_INT token, tree known_binfo)
{
  HOST_WIDE_INT i;
  tree v, fndecl;

  v = BINFO_VIRTUALS (known_binfo);
  i = 0;
  while (i != token)
    {
      i += (TARGET_VTABLE_USES_DESCRIPTORS
            ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
      v = TREE_CHAIN (v);
    }

  fndecl = TREE_VALUE (v);
  /* When cgraph node is missing and function is not public, we cannot
     devirtualize.  This can happen in WHOPR when the actual method
     ends up in other partition, because we found devirtualization
     possibility too late.  */
  if (static_object_in_other_unit_p (fndecl))
    return NULL_TREE;
  return build_fold_addr_expr (fndecl);
}
/* Fold an OBJ_TYPE_REF expression to the address of a function.  If KNOWN_TYPE
   is not NULL_TREE, it is the true type of the outmost encapsulating object if
   that comes from a pointer SSA_NAME.  If the true outmost encapsulating type
   can be determined from a declaration OBJ_TYPE_REF_OBJECT(REF), it is used
   regardless of KNOWN_TYPE (which thus can be NULL_TREE).  */

tree
gimple_fold_obj_type_ref (tree ref, tree known_type)
{
  tree obj = OBJ_TYPE_REF_OBJECT (ref);
  tree known_binfo = known_type ? TYPE_BINFO (known_type) : NULL_TREE;
  tree binfo;

  if (TREE_CODE (obj) == ADDR_EXPR)
    obj = TREE_OPERAND (obj, 0);

  binfo = gimple_get_relevant_ref_binfo (obj, known_binfo);
  if (binfo)
    {
      HOST_WIDE_INT token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
      /* If there are no virtual methods fold this to an indirect call.  */
      if (!BINFO_VIRTUALS (binfo))
        return OBJ_TYPE_REF_EXPR (ref);
      return gimple_fold_obj_type_ref_known_binfo (token, binfo);
    }
  else
    return NULL_TREE;
}
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
fold_gimple_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (!inplace && callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);

      if (result)
        {
          if (!update_call_from_tree (gsi, result))
            gimplify_and_update_call_from_tree (gsi, result);
          return true;
        }
    }
  else
    {
      /* ??? Should perhaps do this in fold proper.  However, doing it
         there requires that we create a new CALL_EXPR, and that requires
         copying EH region info to the new node.  Easier to just do it
         here where we can just smash the call operand.  */
      callee = gimple_call_fn (stmt);
      if (TREE_CODE (callee) == OBJ_TYPE_REF
          && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR)
        {
          tree t;

          t = gimple_fold_obj_type_ref (callee, NULL_TREE);
          if (t)
            {
              gimple_call_set_fn (stmt, t);
              return true;
            }
        }
    }

  return false;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops = gimple_num_ops (stmt);
        tree new_rhs = fold_gimple_assign (gsi);
        tree lhs = gimple_assign_lhs (stmt);
        if (new_rhs
            && !useless_type_conversion_p (TREE_TYPE (lhs),
                                           TREE_TYPE (new_rhs)))
          new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
        if (new_rhs
            && (!inplace
                || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      /* Fold *& in call arguments.  */
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
        if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
          {
            tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
            if (tmp)
              {
                gimple_call_set_arg (stmt, i, tmp);
                changed = true;
              }
          }
      changed |= fold_gimple_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
        {
          tree link = gimple_asm_output_op (stmt, i);
          tree op = TREE_VALUE (link);
          if (REFERENCE_CLASS_P (op)
              && (op = maybe_fold_reference (op, true)) != NULL_TREE)
            {
              TREE_VALUE (link) = op;
              changed = true;
            }
        }
      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
        {
          tree link = gimple_asm_input_op (stmt, i);
          tree op = TREE_VALUE (link);
          if (REFERENCE_CLASS_P (op)
              && (op = maybe_fold_reference (op, false)) != NULL_TREE)
            {
              TREE_VALUE (link) = op;
              changed = true;
            }
        }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
        {
          tree val = gimple_debug_bind_get_value (stmt);
          if (val
              && REFERENCE_CLASS_P (val))
            {
              tree tem = maybe_fold_reference (val, false);
              if (tem)
                {
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
        }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
        {
          tree new_lhs = maybe_fold_reference (lhs, true);
          if (new_lhs)
            {
              gimple_set_lhs (stmt, new_lhs);
              changed = true;
            }
        }
    }

  return changed;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}

/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement STMT should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  bool changed = fold_stmt_1 (&gsi, true);
  gcc_assert (gsi_stmt (gsi) == stmt);
  return changed;
}
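
/* For example, constant propagation may turn "p_1 = &x; *p_1 = 0;"
   into "*&x = 0;"; fold_stmt_inplace rewrites that statement to
   "x = 0;" without ever replacing the gimple statement object.  */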
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;

  if (invert)
    {
      if (integer_nonzerop (expr))
        return boolean_false_node;
      else if (integer_zerop (expr))
        return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (EQ_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
        return expr;
      if (integer_nonzerop (expr))
        return boolean_true_node;
      else if (integer_zerop (expr))
        return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (NE_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (TREE_CODE (expr),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
}
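
/* Behavior sketch for canonicalize_bool (SSA names illustrative):

     canonicalize_bool (a < b, false)  -->  a < b
     canonicalize_bool (a < b, true)   -->  a >= b
     canonicalize_bool (x_1, true)     -->  x_1 == 0

   so the result, when non-null, is always a boolean-typed comparison
   or constant.  */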
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
                        const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
        return ((code == NE_EXPR && integer_zerop (op2))
                || (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
          && gimple_assign_rhs_code (s) == code
          && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
          && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
        return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
        {
          enum tree_code c = gimple_assign_rhs_code (s);
          if ((c == NE_EXPR && integer_zerop (op2))
              || (c == EQ_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr, c,
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
          if ((c == EQ_EXPR && integer_zerop (op2))
              || (c == NE_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr,
                                           invert_tree_comparison (c, false),
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
        }
    }

  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
                                 TREE_OPERAND (op2, 0),
                                 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
                                 TREE_OPERAND (op1, 0),
                                 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
                        enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
                          enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
                                  invert_tree_comparison (code2, false),
                                  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_false_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt),
                                  code2,
                                  op2a,
                                  op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (innercode == TRUTH_AND_EXPR
      || innercode == TRUTH_OR_EXPR
      || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
          && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
         inner1 AND (inner1 OR inner2) => inner1
         !inner1 AND (inner1 AND inner2) => false
         !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
        return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
         Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (t AND inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner2;
              else if (integer_zerop (t))
                return boolean_false_node;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner2 AND (op2a code2 op2b)))  */
          else if (integer_onep (t))
            return boolean_true_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (inner1 AND t)  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner1;
              else if (integer_zerop (t))
                return boolean_false_node;
              /* If both are the same, we can apply the identity
                 (x AND x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner1 AND (op2a code2 op2b)))
             => (t OR partial)  */
          else
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed OR expression.  The
                     interesting case is when at least one is false.
                     Or, if both are the same, we can apply the identity
                     (x OR x) == x.  */
                  if (integer_zerop (partial))
                    return t;
                  else if (integer_zerop (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
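
/* Reassociation example for the code above (SSA names are illustrative):

     v_1 = a_2 < 3;
     w_3 = b_5 > 0;
     x_4 = v_1 & w_3;
     ... (x_4 != 0) AND (a_2 >= 3) ...

   The first partial result (a_2 < 3) AND (a_2 >= 3) folds to false,
   so the whole conjunction simplifies to false without materializing
   any new SSA names.  */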
2025 /* Try to simplify the AND of two comparisons defined by
2026 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2027 If this can be done without constructing an intermediate value,
2028 return the resulting tree; otherwise NULL_TREE is returned.
2029 This function is deliberately asymmetric as it recurses on SSA_DEFs
2030 in the first comparison but not the second. */
2033 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
2034 enum tree_code code2
, tree op2a
, tree op2b
)
2036 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
2037 if (operand_equal_p (op1a
, op2a
, 0)
2038 && operand_equal_p (op1b
, op2b
, 0))
2040 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2041 TRUTH_ANDIF_EXPR
, code1
, code2
,
2042 boolean_type_node
, op1a
, op1b
);
2047 /* Likewise the swapped case of the above. */
2048 if (operand_equal_p (op1a
, op2b
, 0)
2049 && operand_equal_p (op1b
, op2a
, 0))
2051 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2052 TRUTH_ANDIF_EXPR
, code1
,
2053 swap_tree_comparison (code2
),
2054 boolean_type_node
, op1a
, op1b
);
2059 /* If both comparisons are of the same value against constants, we might
2060 be able to merge them. */
2061 if (operand_equal_p (op1a
, op2a
, 0)
2062 && TREE_CODE (op1b
) == INTEGER_CST
2063 && TREE_CODE (op2b
) == INTEGER_CST
)
2065 int cmp
= tree_int_cst_compare (op1b
, op2b
);
2067 /* If we have (op1a == op1b), we should either be able to
2068 return that or FALSE, depending on whether the constant op1b
2069 also satisfies the other comparison against op2b. */
2070 if (code1
== EQ_EXPR
)
2076 case EQ_EXPR
: val
= (cmp
== 0); break;
2077 case NE_EXPR
: val
= (cmp
!= 0); break;
2078 case LT_EXPR
: val
= (cmp
< 0); break;
2079 case GT_EXPR
: val
= (cmp
> 0); break;
2080 case LE_EXPR
: val
= (cmp
<= 0); break;
2081 case GE_EXPR
: val
= (cmp
>= 0); break;
2082 default: done
= false;
2087 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2089 return boolean_false_node
;
2092 /* Likewise if the second comparison is an == comparison. */
2093 else if (code2
== EQ_EXPR
)
2099 case EQ_EXPR
: val
= (cmp
== 0); break;
2100 case NE_EXPR
: val
= (cmp
!= 0); break;
2101 case LT_EXPR
: val
= (cmp
> 0); break;
2102 case GT_EXPR
: val
= (cmp
< 0); break;
2103 case LE_EXPR
: val
= (cmp
>= 0); break;
2104 case GE_EXPR
: val
= (cmp
<= 0); break;
2105 default: done
= false;
2110 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2112 return boolean_false_node
;
2116 /* Same business with inequality tests. */
2117 else if (code1
== NE_EXPR
)
2122 case EQ_EXPR
: val
= (cmp
!= 0); break;
2123 case NE_EXPR
: val
= (cmp
== 0); break;
2124 case LT_EXPR
: val
= (cmp
>= 0); break;
2125 case GT_EXPR
: val
= (cmp
<= 0); break;
2126 case LE_EXPR
: val
= (cmp
> 0); break;
2127 case GE_EXPR
: val
= (cmp
< 0); break;
2132 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}
      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
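      /* E.g. (x < 3) AND (x < 5) keeps x < 3, and the tie case
	 (x < 3) AND (x <= 3) keeps the strict x < 3.  (Illustration,
	 not original text.)  */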
      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
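      /* E.g. (x <= 4) AND (x >= 4) pins x to the single value 4 and
	 becomes x == 4.  (Illustration, not original text.)  */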
      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }
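  /* E.g. (x < 3) AND (x > 5) can never hold, so it folds to constant
     false; the equal-bound case (x <= 3) AND (x >= 3) was already taken
     by the singleton check above.  (Illustration, not original text.)  */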
  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME)
		    {
		      tree temp = and_var_with_comparison (arg, invert,
							   code2, op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  break;

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
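/* Usage sketch (hypothetical caller, not from the original sources): to
   fold the GIMPLE sequence
       _1 = x_2 > 0;
       _3 = x_2 > 4;
       _4 = _1 & _3;
   a pass can call this with (GT_EXPR, x_2, 0) and (GT_EXPR, x_2, 4);
   merging the constant bounds yields the single comparison x_2 > 4, the
   more restrictive of the two, and _4 can be rewritten accordingly.  */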
/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
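/* For illustration (not original text): with INVERT set and the test
   (x < y) on integral operands, the rewrite computes
   !(var AND (x >= y)) and then re-inverts via canonicalize_bool;
   invert_tree_comparison is what guards the floating-point cases where
   such a naive inversion of the comparison code would be wrong.  */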
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => var
     and (var OR (var == 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }
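  /* For example, if var is _b, then _b OR (_b != 0) is just _b, while
     _b OR (_b == 0) holds for either value of _b and so is constant
     true.  (Illustration, not original text.)  */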
  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2, op2a, op2b);
      if (t)
	return t;
    }
  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (innercode == TRUTH_AND_EXPR
      || innercode == TRUTH_OR_EXPR
      || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
	  && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));
      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Save partial result for later.  */
	  partial = t;
	}
      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t), and if t is a constant, we win.  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
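/* Worked example (illustrative, not original text): (inner1 AND inner2)
   OR T distributes to (inner1 OR T) AND (inner2 OR T).  With partial =
   inner1 OR T and t = inner2 OR T: if t is false the conjunction is
   false; if partial is true the result is t; if t is true the result is
   partial; and if both partial results agree, that shared value is the
   answer.  */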
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }
  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }
  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);
      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool val;
	  bool done = true;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
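      /* E.g. (x != 3) OR (x < 5): the constant 3 satisfies x < 5, so one
	 of the two tests always holds and the OR is constant true; with
	 (x != 3) OR (x > 5) it folds to x != 3 instead.  (Illustration,
	 not original text.)  */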
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  bool done = true;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}
      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}
      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}
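      /* E.g. (x < 3) OR (x < 5) keeps the wider bound x < 5, and
	 (x < 3) OR (x <= 3) keeps x <= 3.  (Illustration, not original
	 text.)  */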
      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}
      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
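      /* E.g. (x < 3) OR (x > 3) excludes only the single value 3 and
	 becomes x != 3.  (Illustration, not original text.)  */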
      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }
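  /* E.g. (x <= 5) OR (x >= 3) covers every value of x and folds to
     constant true.  (Illustration, not original text.)  */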
  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME)
		    {
		      tree temp = or_var_with_comparison (arg, invert,
							  code2, op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  break;

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
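/* Usage sketch (hypothetical caller, not from the original sources): to
   fold the GIMPLE sequence
       _1 = x_2 < 0;
       _3 = x_2 < 4;
       _4 = _1 | _3;
   a pass can call this with (LT_EXPR, x_2, 0) and (LT_EXPR, x_2, 4) and
   receive the single comparison x_2 < 4, the wider of the two bounds; a
   disjoint pair such as (x_2 < 0) | (x_2 > 4) has no single-comparison
   equivalent, so NULL_TREE is returned.  */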