1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
34 /* Return true when DECL can be referenced from current unit.
35 We can get declarations that are not possible to reference for
38 1) When analyzing C++ virtual tables.
39 C++ virtual tables do have known constructors even
40 when they are keyed to other compilation unit.
41 Those tables can contain pointers to methods and vars
42 in other units. Those methods have both STATIC and EXTERNAL
44 2) In WHOPR mode devirtualization might lead to reference
45 to method that was partitioned elsehwere.
46 In this case we have static VAR_DECL or FUNCTION_DECL
47 that has no corresponding callgraph/varpool node
49 3) COMDAT functions referred by external vtables that
50 we devirtualize only during final copmilation stage.
51 At this time we already decided that we will not output
52 the function body and thus we can't reference the symbol
56 can_refer_decl_in_current_unit_p (tree decl
)
58 struct varpool_node
*vnode
;
59 struct cgraph_node
*node
;
61 if (!TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
))
63 /* External flag is set, so we deal with C++ reference
64 to static object from other file. */
65 if (DECL_EXTERNAL (decl
) && TREE_STATIC (decl
)
66 && TREE_CODE (decl
) == VAR_DECL
)
68 /* Just be sure it is not big in frontend setting
69 flags incorrectly. Those variables should never
71 gcc_checking_assert (!(vnode
= varpool_get_node (decl
))
72 || !vnode
->finalized
);
75 /* When function is public, we always can introduce new reference.
76 Exception are the COMDAT functions where introducing a direct
77 reference imply need to include function body in the curren tunit. */
78 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
80 /* We are not at ltrans stage; so don't worry about WHOPR.
81 Also when still gimplifying all referred comdat functions will be
83 if (!flag_ltrans
&& (!DECL_COMDAT (decl
) || !cgraph_function_flags_ready
))
85 /* If we already output the function body, we are safe. */
86 if (TREE_ASM_WRITTEN (decl
))
88 if (TREE_CODE (decl
) == FUNCTION_DECL
)
90 node
= cgraph_get_node (decl
);
91 /* Check that we still have function body and that we didn't took
92 the decision to eliminate offline copy of the function yet.
93 The second is important when devirtualization happens during final
94 compilation stage when making a new reference no longer makes callee
96 if (!node
|| !node
->analyzed
|| node
->global
.inlined_to
)
99 else if (TREE_CODE (decl
) == VAR_DECL
)
101 vnode
= varpool_get_node (decl
);
102 if (!vnode
|| !vnode
->finalized
)
108 /* CVAL is value taken from DECL_INITIAL of variable. Try to transorm it into
109 acceptable form for is_gimple_min_invariant. */
112 canonicalize_constructor_val (tree cval
)
115 if (TREE_CODE (cval
) == POINTER_PLUS_EXPR
)
117 tree t
= maybe_fold_offset_to_address (EXPR_LOCATION (cval
),
118 TREE_OPERAND (cval
, 0),
119 TREE_OPERAND (cval
, 1),
124 if (TREE_CODE (cval
) == ADDR_EXPR
)
126 tree base
= get_base_address (TREE_OPERAND (cval
, 0));
129 && (TREE_CODE (base
) == VAR_DECL
130 || TREE_CODE (base
) == FUNCTION_DECL
)
131 && !can_refer_decl_in_current_unit_p (base
))
133 if (base
&& TREE_CODE (base
) == VAR_DECL
)
134 add_referenced_var (base
);
139 /* If SYM is a constant variable with known value, return the value.
140 NULL_TREE is returned otherwise. */
143 get_symbol_constant_value (tree sym
)
145 if (const_value_known_p (sym
))
147 tree val
= DECL_INITIAL (sym
);
150 val
= canonicalize_constructor_val (val
);
151 if (val
&& is_gimple_min_invariant (val
))
156 /* Variables declared 'const' without an initializer
157 have zero as the initializer if they may not be
158 overridden at link or run time. */
160 && (INTEGRAL_TYPE_P (TREE_TYPE (sym
))
161 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym
))))
162 return fold_convert (TREE_TYPE (sym
), integer_zero_node
);
169 /* Return true if we may propagate the address expression ADDR into the
170 dereference DEREF and cancel them. */
173 may_propagate_address_into_dereference (tree addr
, tree deref
)
175 gcc_assert (TREE_CODE (deref
) == MEM_REF
176 && TREE_CODE (addr
) == ADDR_EXPR
);
178 /* Don't propagate if ADDR's operand has incomplete type. */
179 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr
, 0))))
182 /* If the address is invariant then we do not need to preserve restrict
183 qualifications. But we do need to preserve volatile qualifiers until
184 we can annotate the folded dereference itself properly. */
185 if (is_gimple_min_invariant (addr
)
186 && (!TREE_THIS_VOLATILE (deref
)
187 || TYPE_VOLATILE (TREE_TYPE (addr
))))
188 return useless_type_conversion_p (TREE_TYPE (deref
),
189 TREE_TYPE (TREE_OPERAND (addr
, 0)));
191 /* Else both the address substitution and the folding must result in
192 a valid useless type conversion sequence. */
193 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref
, 0)),
195 && useless_type_conversion_p (TREE_TYPE (deref
),
196 TREE_TYPE (TREE_OPERAND (addr
, 0))));
200 /* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
201 BASE is an array type. OFFSET is a byte displacement.
203 LOC is the location of the original expression. */
206 maybe_fold_offset_to_array_ref (location_t loc
, tree base
, tree offset
)
208 tree min_idx
, idx
, idx_type
, elt_offset
= integer_zero_node
;
209 tree array_type
, elt_type
, elt_size
;
212 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
213 measured in units of the size of elements type) from that ARRAY_REF).
214 We can't do anything if either is variable.
216 The case we handle here is *(&A[N]+O). */
217 if (TREE_CODE (base
) == ARRAY_REF
)
219 tree low_bound
= array_ref_low_bound (base
);
221 elt_offset
= TREE_OPERAND (base
, 1);
222 if (TREE_CODE (low_bound
) != INTEGER_CST
223 || TREE_CODE (elt_offset
) != INTEGER_CST
)
226 elt_offset
= int_const_binop (MINUS_EXPR
, elt_offset
, low_bound
, 0);
227 base
= TREE_OPERAND (base
, 0);
230 /* Ignore stupid user tricks of indexing non-array variables. */
231 array_type
= TREE_TYPE (base
);
232 if (TREE_CODE (array_type
) != ARRAY_TYPE
)
234 elt_type
= TREE_TYPE (array_type
);
236 /* Use signed size type for intermediate computation on the index. */
237 idx_type
= ssizetype
;
239 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
240 element type (so we can use the alignment if it's not constant).
241 Otherwise, compute the offset as an index by using a division. If the
242 division isn't exact, then don't do anything. */
243 elt_size
= TYPE_SIZE_UNIT (elt_type
);
246 if (integer_zerop (offset
))
248 if (TREE_CODE (elt_size
) != INTEGER_CST
)
249 elt_size
= size_int (TYPE_ALIGN (elt_type
));
251 idx
= build_int_cst (idx_type
, 0);
255 unsigned HOST_WIDE_INT lquo
, lrem
;
256 HOST_WIDE_INT hquo
, hrem
;
259 /* The final array offset should be signed, so we need
260 to sign-extend the (possibly pointer) offset here
261 and use signed division. */
262 soffset
= double_int_sext (tree_to_double_int (offset
),
263 TYPE_PRECISION (TREE_TYPE (offset
)));
264 if (TREE_CODE (elt_size
) != INTEGER_CST
265 || div_and_round_double (TRUNC_DIV_EXPR
, 0,
266 soffset
.low
, soffset
.high
,
267 TREE_INT_CST_LOW (elt_size
),
268 TREE_INT_CST_HIGH (elt_size
),
269 &lquo
, &hquo
, &lrem
, &hrem
)
273 idx
= build_int_cst_wide (idx_type
, lquo
, hquo
);
276 /* Assume the low bound is zero. If there is a domain type, get the
277 low bound, if any, convert the index into that type, and add the
279 min_idx
= build_int_cst (idx_type
, 0);
280 domain_type
= TYPE_DOMAIN (array_type
);
283 idx_type
= domain_type
;
284 if (TYPE_MIN_VALUE (idx_type
))
285 min_idx
= TYPE_MIN_VALUE (idx_type
);
287 min_idx
= fold_convert (idx_type
, min_idx
);
289 if (TREE_CODE (min_idx
) != INTEGER_CST
)
292 elt_offset
= fold_convert (idx_type
, elt_offset
);
295 if (!integer_zerop (min_idx
))
296 idx
= int_const_binop (PLUS_EXPR
, idx
, min_idx
, 0);
297 if (!integer_zerop (elt_offset
))
298 idx
= int_const_binop (PLUS_EXPR
, idx
, elt_offset
, 0);
300 /* Make sure to possibly truncate late after offsetting. */
301 idx
= fold_convert (idx_type
, idx
);
303 /* We don't want to construct access past array bounds. For example
306 should not be simplified into (*c)[14] or tree-vrp will
308 This is only an issue for multi-dimensional arrays. */
309 if (TREE_CODE (elt_type
) == ARRAY_TYPE
312 if (TYPE_MAX_VALUE (domain_type
)
313 && TREE_CODE (TYPE_MAX_VALUE (domain_type
)) == INTEGER_CST
314 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type
), idx
))
316 else if (TYPE_MIN_VALUE (domain_type
)
317 && TREE_CODE (TYPE_MIN_VALUE (domain_type
)) == INTEGER_CST
318 && tree_int_cst_lt (idx
, TYPE_MIN_VALUE (domain_type
)))
320 else if (compare_tree_int (idx
, 0) < 0)
325 tree t
= build4 (ARRAY_REF
, elt_type
, base
, idx
, NULL_TREE
, NULL_TREE
);
326 SET_EXPR_LOCATION (t
, loc
);
332 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
333 LOC is the location of original expression.
335 Before attempting the conversion strip off existing ADDR_EXPRs. */
338 maybe_fold_offset_to_reference (location_t loc
, tree base
, tree offset
,
344 if (TREE_CODE (base
) != ADDR_EXPR
)
347 base
= TREE_OPERAND (base
, 0);
348 if (types_compatible_p (orig_type
, TREE_TYPE (base
))
349 && integer_zerop (offset
))
352 ret
= maybe_fold_offset_to_array_ref (loc
, base
, offset
);
353 if (ret
&& types_compatible_p (orig_type
, TREE_TYPE (ret
)))
358 /* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
359 LOC is the location of the original expression. */
362 maybe_fold_offset_to_address (location_t loc
, tree addr
, tree offset
,
368 if (TREE_CODE (addr
) != ADDR_EXPR
)
370 base
= TREE_OPERAND (addr
, 0);
371 ret
= maybe_fold_offset_to_array_ref (loc
, base
, offset
);
374 ret
= build_fold_addr_expr (ret
);
375 if (!useless_type_conversion_p (orig_type
, TREE_TYPE (ret
)))
377 SET_EXPR_LOCATION (ret
, loc
);
384 /* A quaint feature extant in our address arithmetic is that there
385 can be hidden type changes here. The type of the result need
386 not be the same as the type of the input pointer.
388 What we're after here is an expression of the form
389 (T *)(&array + const)
390 where array is OP0, const is OP1, RES_TYPE is T and
391 the cast doesn't actually exist, but is implicit in the
392 type of the POINTER_PLUS_EXPR. We'd like to turn this into
394 which may be able to propagate further. */
397 maybe_fold_stmt_addition (location_t loc
, tree res_type
, tree op0
, tree op1
)
402 /* The first operand should be an ADDR_EXPR. */
403 if (TREE_CODE (op0
) != ADDR_EXPR
)
405 op0
= TREE_OPERAND (op0
, 0);
407 /* It had better be a constant. */
408 if (TREE_CODE (op1
) != INTEGER_CST
)
410 /* Or op0 should now be A[0] and the non-constant offset defined
411 via a multiplication by the array element size. */
412 if (TREE_CODE (op0
) == ARRAY_REF
413 /* As we will end up creating a variable index array access
414 in the outermost array dimension make sure there isn't
415 a more inner array that the index could overflow to. */
416 && TREE_CODE (TREE_OPERAND (op0
, 0)) != ARRAY_REF
417 && integer_zerop (TREE_OPERAND (op0
, 1))
418 && TREE_CODE (op1
) == SSA_NAME
)
420 gimple offset_def
= SSA_NAME_DEF_STMT (op1
);
421 tree elsz
= TYPE_SIZE_UNIT (TREE_TYPE (op0
));
422 if (!host_integerp (elsz
, 1)
423 || !is_gimple_assign (offset_def
))
426 /* Do not build array references of something that we can't
427 see the true number of array dimensions for. */
428 if (!DECL_P (TREE_OPERAND (op0
, 0))
429 && !handled_component_p (TREE_OPERAND (op0
, 0)))
432 if (gimple_assign_rhs_code (offset_def
) == MULT_EXPR
433 && TREE_CODE (gimple_assign_rhs2 (offset_def
)) == INTEGER_CST
434 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def
), elsz
))
435 return build_fold_addr_expr
436 (build4 (ARRAY_REF
, TREE_TYPE (op0
),
437 TREE_OPERAND (op0
, 0),
438 gimple_assign_rhs1 (offset_def
),
439 TREE_OPERAND (op0
, 2),
440 TREE_OPERAND (op0
, 3)));
441 else if (integer_onep (elsz
)
442 && gimple_assign_rhs_code (offset_def
) != MULT_EXPR
)
443 return build_fold_addr_expr
444 (build4 (ARRAY_REF
, TREE_TYPE (op0
),
445 TREE_OPERAND (op0
, 0),
447 TREE_OPERAND (op0
, 2),
448 TREE_OPERAND (op0
, 3)));
450 else if (TREE_CODE (TREE_TYPE (op0
)) == ARRAY_TYPE
452 && TREE_CODE (TREE_TYPE (TREE_TYPE (op0
))) != ARRAY_TYPE
453 && TREE_CODE (op1
) == SSA_NAME
)
455 gimple offset_def
= SSA_NAME_DEF_STMT (op1
);
456 tree elsz
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0
)));
457 if (!host_integerp (elsz
, 1)
458 || !is_gimple_assign (offset_def
))
461 /* Do not build array references of something that we can't
462 see the true number of array dimensions for. */
464 && !handled_component_p (op0
))
467 if (gimple_assign_rhs_code (offset_def
) == MULT_EXPR
468 && TREE_CODE (gimple_assign_rhs2 (offset_def
)) == INTEGER_CST
469 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def
), elsz
))
470 return build_fold_addr_expr
471 (build4 (ARRAY_REF
, TREE_TYPE (TREE_TYPE (op0
)),
472 op0
, gimple_assign_rhs1 (offset_def
),
473 integer_zero_node
, NULL_TREE
));
474 else if (integer_onep (elsz
)
475 && gimple_assign_rhs_code (offset_def
) != MULT_EXPR
)
476 return build_fold_addr_expr
477 (build4 (ARRAY_REF
, TREE_TYPE (TREE_TYPE (op0
)),
479 integer_zero_node
, NULL_TREE
));
485 /* If the first operand is an ARRAY_REF, expand it so that we can fold
486 the offset into it. */
487 while (TREE_CODE (op0
) == ARRAY_REF
)
489 tree array_obj
= TREE_OPERAND (op0
, 0);
490 tree array_idx
= TREE_OPERAND (op0
, 1);
491 tree elt_type
= TREE_TYPE (op0
);
492 tree elt_size
= TYPE_SIZE_UNIT (elt_type
);
495 if (TREE_CODE (array_idx
) != INTEGER_CST
)
497 if (TREE_CODE (elt_size
) != INTEGER_CST
)
500 /* Un-bias the index by the min index of the array type. */
501 min_idx
= TYPE_DOMAIN (TREE_TYPE (array_obj
));
504 min_idx
= TYPE_MIN_VALUE (min_idx
);
507 if (TREE_CODE (min_idx
) != INTEGER_CST
)
510 array_idx
= fold_convert (TREE_TYPE (min_idx
), array_idx
);
511 if (!integer_zerop (min_idx
))
512 array_idx
= int_const_binop (MINUS_EXPR
, array_idx
,
517 /* Convert the index to a byte offset. */
518 array_idx
= fold_convert (sizetype
, array_idx
);
519 array_idx
= int_const_binop (MULT_EXPR
, array_idx
, elt_size
, 0);
521 /* Update the operands for the next round, or for folding. */
522 op1
= int_const_binop (PLUS_EXPR
,
527 ptd_type
= TREE_TYPE (res_type
);
528 /* If we want a pointer to void, reconstruct the reference from the
529 array element type. A pointer to that can be trivially converted
530 to void *. This happens as we fold (void *)(ptr p+ off). */
531 if (VOID_TYPE_P (ptd_type
)
532 && TREE_CODE (TREE_TYPE (op0
)) == ARRAY_TYPE
)
533 ptd_type
= TREE_TYPE (TREE_TYPE (op0
));
535 /* At which point we can try some of the same things as for indirects. */
536 t
= maybe_fold_offset_to_array_ref (loc
, op0
, op1
);
539 t
= build_fold_addr_expr (t
);
540 if (!useless_type_conversion_p (res_type
, TREE_TYPE (t
)))
542 SET_EXPR_LOCATION (t
, loc
);
548 /* Subroutine of fold_stmt. We perform several simplifications of the
549 memory reference tree EXPR and make sure to re-gimplify them properly
550 after propagation of constant addresses. IS_LHS is true if the
551 reference is supposed to be an lvalue. */
554 maybe_fold_reference (tree expr
, bool is_lhs
)
560 && (result
= fold_const_aggregate_ref (expr
))
561 && is_gimple_min_invariant (result
))
564 /* ??? We might want to open-code the relevant remaining cases
565 to avoid using the generic fold. */
566 if (handled_component_p (*t
)
567 && CONSTANT_CLASS_P (TREE_OPERAND (*t
, 0)))
569 tree tem
= fold (*t
);
574 while (handled_component_p (*t
))
575 t
= &TREE_OPERAND (*t
, 0);
577 /* Fold back MEM_REFs to reference trees. */
578 if (TREE_CODE (*t
) == MEM_REF
579 && TREE_CODE (TREE_OPERAND (*t
, 0)) == ADDR_EXPR
580 && integer_zerop (TREE_OPERAND (*t
, 1))
581 && (TREE_THIS_VOLATILE (*t
)
582 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0)))
583 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t
, 1)))
584 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t
))
585 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t
, 1)))))
586 /* We have to look out here to not drop a required conversion
587 from the rhs to the lhs if is_lhs, but we don't have the
588 rhs here to verify that. Thus require strict type
590 && types_compatible_p (TREE_TYPE (*t
),
591 TREE_TYPE (TREE_OPERAND
592 (TREE_OPERAND (*t
, 0), 0))))
595 *t
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
596 tem
= maybe_fold_reference (expr
, is_lhs
);
601 /* Canonicalize MEM_REFs invariant address operand. */
602 else if (TREE_CODE (*t
) == MEM_REF
603 && TREE_CODE (TREE_OPERAND (*t
, 0)) == ADDR_EXPR
604 && !DECL_P (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0))
605 && !CONSTANT_CLASS_P (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0)))
607 tree tem
= fold_binary (MEM_REF
, TREE_TYPE (*t
),
608 TREE_OPERAND (*t
, 0),
609 TREE_OPERAND (*t
, 1));
613 tem
= maybe_fold_reference (expr
, is_lhs
);
619 else if (TREE_CODE (*t
) == TARGET_MEM_REF
)
621 tree tem
= maybe_fold_tmr (*t
);
625 tem
= maybe_fold_reference (expr
, is_lhs
);
634 tree tem
= get_symbol_constant_value (*t
);
636 && useless_type_conversion_p (TREE_TYPE (*t
), TREE_TYPE (tem
)))
638 *t
= unshare_expr (tem
);
639 tem
= maybe_fold_reference (expr
, is_lhs
);
650 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
651 replacement rhs for the statement or NULL_TREE if no simplification
652 could be made. It is assumed that the operands have been previously
656 fold_gimple_assign (gimple_stmt_iterator
*si
)
658 gimple stmt
= gsi_stmt (*si
);
659 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
660 location_t loc
= gimple_location (stmt
);
662 tree result
= NULL_TREE
;
664 switch (get_gimple_rhs_class (subcode
))
666 case GIMPLE_SINGLE_RHS
:
668 tree rhs
= gimple_assign_rhs1 (stmt
);
670 /* Try to fold a conditional expression. */
671 if (TREE_CODE (rhs
) == COND_EXPR
)
673 tree op0
= COND_EXPR_COND (rhs
);
676 location_t cond_loc
= EXPR_LOCATION (rhs
);
678 if (COMPARISON_CLASS_P (op0
))
680 fold_defer_overflow_warnings ();
681 tem
= fold_binary_loc (cond_loc
,
682 TREE_CODE (op0
), TREE_TYPE (op0
),
683 TREE_OPERAND (op0
, 0),
684 TREE_OPERAND (op0
, 1));
685 /* This is actually a conditional expression, not a GIMPLE
686 conditional statement, however, the valid_gimple_rhs_p
687 test still applies. */
688 set
= (tem
&& is_gimple_condexpr (tem
)
689 && valid_gimple_rhs_p (tem
));
690 fold_undefer_overflow_warnings (set
, stmt
, 0);
692 else if (is_gimple_min_invariant (op0
))
701 result
= fold_build3_loc (cond_loc
, COND_EXPR
, TREE_TYPE (rhs
), tem
,
702 COND_EXPR_THEN (rhs
), COND_EXPR_ELSE (rhs
));
705 else if (REFERENCE_CLASS_P (rhs
))
706 return maybe_fold_reference (rhs
, false);
708 else if (TREE_CODE (rhs
) == ADDR_EXPR
)
710 tree ref
= TREE_OPERAND (rhs
, 0);
711 tree tem
= maybe_fold_reference (ref
, true);
713 && TREE_CODE (tem
) == MEM_REF
714 && integer_zerop (TREE_OPERAND (tem
, 1)))
715 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (tem
, 0));
717 result
= fold_convert (TREE_TYPE (rhs
),
718 build_fold_addr_expr_loc (loc
, tem
));
719 else if (TREE_CODE (ref
) == MEM_REF
720 && integer_zerop (TREE_OPERAND (ref
, 1)))
721 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (ref
, 0));
724 else if (TREE_CODE (rhs
) == CONSTRUCTOR
725 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
726 && (CONSTRUCTOR_NELTS (rhs
)
727 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
729 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
733 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
734 if (TREE_CODE (val
) != INTEGER_CST
735 && TREE_CODE (val
) != REAL_CST
736 && TREE_CODE (val
) != FIXED_CST
)
739 return build_vector_from_ctor (TREE_TYPE (rhs
),
740 CONSTRUCTOR_ELTS (rhs
));
743 else if (DECL_P (rhs
))
744 return unshare_expr (get_symbol_constant_value (rhs
));
746 /* If we couldn't fold the RHS, hand over to the generic
748 if (result
== NULL_TREE
)
751 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
752 that may have been added by fold, and "useless" type
753 conversions that might now be apparent due to propagation. */
754 STRIP_USELESS_TYPE_CONVERSION (result
);
756 if (result
!= rhs
&& valid_gimple_rhs_p (result
))
763 case GIMPLE_UNARY_RHS
:
765 tree rhs
= gimple_assign_rhs1 (stmt
);
767 result
= fold_unary_loc (loc
, subcode
, gimple_expr_type (stmt
), rhs
);
770 /* If the operation was a conversion do _not_ mark a
771 resulting constant with TREE_OVERFLOW if the original
772 constant was not. These conversions have implementation
773 defined behavior and retaining the TREE_OVERFLOW flag
774 here would confuse later passes such as VRP. */
775 if (CONVERT_EXPR_CODE_P (subcode
)
776 && TREE_CODE (result
) == INTEGER_CST
777 && TREE_CODE (rhs
) == INTEGER_CST
)
778 TREE_OVERFLOW (result
) = TREE_OVERFLOW (rhs
);
780 STRIP_USELESS_TYPE_CONVERSION (result
);
781 if (valid_gimple_rhs_p (result
))
784 else if (CONVERT_EXPR_CODE_P (subcode
)
785 && POINTER_TYPE_P (gimple_expr_type (stmt
))
786 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt
))))
788 tree type
= gimple_expr_type (stmt
);
789 tree t
= maybe_fold_offset_to_address (loc
,
790 gimple_assign_rhs1 (stmt
),
791 integer_zero_node
, type
);
798 case GIMPLE_BINARY_RHS
:
799 /* Try to fold pointer addition. */
800 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
802 tree type
= TREE_TYPE (gimple_assign_rhs1 (stmt
));
803 if (TREE_CODE (TREE_TYPE (type
)) == ARRAY_TYPE
)
805 type
= build_pointer_type (TREE_TYPE (TREE_TYPE (type
)));
806 if (!useless_type_conversion_p
807 (TREE_TYPE (gimple_assign_lhs (stmt
)), type
))
808 type
= TREE_TYPE (gimple_assign_rhs1 (stmt
));
810 result
= maybe_fold_stmt_addition (gimple_location (stmt
),
812 gimple_assign_rhs1 (stmt
),
813 gimple_assign_rhs2 (stmt
));
817 result
= fold_binary_loc (loc
, subcode
,
818 TREE_TYPE (gimple_assign_lhs (stmt
)),
819 gimple_assign_rhs1 (stmt
),
820 gimple_assign_rhs2 (stmt
));
824 STRIP_USELESS_TYPE_CONVERSION (result
);
825 if (valid_gimple_rhs_p (result
))
828 /* Fold might have produced non-GIMPLE, so if we trust it blindly
829 we lose canonicalization opportunities. Do not go again
830 through fold here though, or the same non-GIMPLE will be
832 if (commutative_tree_code (subcode
)
833 && tree_swap_operands_p (gimple_assign_rhs1 (stmt
),
834 gimple_assign_rhs2 (stmt
), false))
835 return build2 (subcode
, TREE_TYPE (gimple_assign_lhs (stmt
)),
836 gimple_assign_rhs2 (stmt
),
837 gimple_assign_rhs1 (stmt
));
841 case GIMPLE_TERNARY_RHS
:
842 result
= fold_ternary_loc (loc
, subcode
,
843 TREE_TYPE (gimple_assign_lhs (stmt
)),
844 gimple_assign_rhs1 (stmt
),
845 gimple_assign_rhs2 (stmt
),
846 gimple_assign_rhs3 (stmt
));
850 STRIP_USELESS_TYPE_CONVERSION (result
);
851 if (valid_gimple_rhs_p (result
))
854 /* Fold might have produced non-GIMPLE, so if we trust it blindly
855 we lose canonicalization opportunities. Do not go again
856 through fold here though, or the same non-GIMPLE will be
858 if (commutative_ternary_tree_code (subcode
)
859 && tree_swap_operands_p (gimple_assign_rhs1 (stmt
),
860 gimple_assign_rhs2 (stmt
), false))
861 return build3 (subcode
, TREE_TYPE (gimple_assign_lhs (stmt
)),
862 gimple_assign_rhs2 (stmt
),
863 gimple_assign_rhs1 (stmt
),
864 gimple_assign_rhs3 (stmt
));
868 case GIMPLE_INVALID_RHS
:
875 /* Attempt to fold a conditional statement. Return true if any changes were
876 made. We only attempt to fold the condition expression, and do not perform
877 any transformation that would require alteration of the cfg. It is
878 assumed that the operands have been previously folded. */
881 fold_gimple_cond (gimple stmt
)
883 tree result
= fold_binary_loc (gimple_location (stmt
),
884 gimple_cond_code (stmt
),
886 gimple_cond_lhs (stmt
),
887 gimple_cond_rhs (stmt
));
891 STRIP_USELESS_TYPE_CONVERSION (result
);
892 if (is_gimple_condexpr (result
) && valid_gimple_rhs_p (result
))
894 gimple_cond_set_condition_from_tree (stmt
, result
);
902 /* Convert EXPR into a GIMPLE value suitable for substitution on the
903 RHS of an assignment. Insert the necessary statements before
904 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
905 is replaced. If the call is expected to produces a result, then it
906 is replaced by an assignment of the new RHS to the result variable.
907 If the result is to be ignored, then the call is replaced by a
908 GIMPLE_NOP. A proper VDEF chain is retained by making the first
909 VUSE and the last VDEF of the whole sequence be the same as the replaced
910 statement and using new SSA names for stores in between. */
913 gimplify_and_update_call_from_tree (gimple_stmt_iterator
*si_p
, tree expr
)
916 tree tmp
= NULL_TREE
; /* Silence warning. */
917 gimple stmt
, new_stmt
;
918 gimple_stmt_iterator i
;
919 gimple_seq stmts
= gimple_seq_alloc();
920 struct gimplify_ctx gctx
;
922 gimple laststore
= NULL
;
925 stmt
= gsi_stmt (*si_p
);
927 gcc_assert (is_gimple_call (stmt
));
929 lhs
= gimple_call_lhs (stmt
);
930 reaching_vuse
= gimple_vuse (stmt
);
932 push_gimplify_context (&gctx
);
934 if (lhs
== NULL_TREE
)
936 gimplify_and_add (expr
, &stmts
);
937 /* We can end up with folding a memcpy of an empty class assignment
938 which gets optimized away by C++ gimplification. */
939 if (gimple_seq_empty_p (stmts
))
941 if (gimple_in_ssa_p (cfun
))
943 unlink_stmt_vdef (stmt
);
946 gsi_remove (si_p
, true);
951 tmp
= get_initialized_tmp_var (expr
, &stmts
, NULL
);
953 pop_gimplify_context (NULL
);
955 if (gimple_has_location (stmt
))
956 annotate_all_with_location (stmts
, gimple_location (stmt
));
958 /* The replacement can expose previously unreferenced variables. */
959 for (i
= gsi_start (stmts
); !gsi_end_p (i
); gsi_next (&i
))
963 gsi_insert_before (si_p
, last
, GSI_NEW_STMT
);
966 new_stmt
= gsi_stmt (i
);
967 if (gimple_in_ssa_p (cfun
))
969 find_new_referenced_vars (new_stmt
);
970 mark_symbols_for_renaming (new_stmt
);
972 /* If the new statement has a VUSE, update it with exact SSA name we
973 know will reach this one. */
974 if (gimple_vuse (new_stmt
))
976 /* If we've also seen a previous store create a new VDEF for
977 the latter one, and make that the new reaching VUSE. */
980 reaching_vuse
= make_ssa_name (gimple_vop (cfun
), laststore
);
981 gimple_set_vdef (laststore
, reaching_vuse
);
982 update_stmt (laststore
);
985 gimple_set_vuse (new_stmt
, reaching_vuse
);
986 gimple_set_modified (new_stmt
, true);
988 if (gimple_assign_single_p (new_stmt
)
989 && !is_gimple_reg (gimple_assign_lhs (new_stmt
)))
991 laststore
= new_stmt
;
996 if (lhs
== NULL_TREE
)
998 /* If we replace a call without LHS that has a VDEF and our new
999 sequence ends with a store we must make that store have the same
1000 vdef in order not to break the sequencing. This can happen
1001 for instance when folding memcpy calls into assignments. */
1002 if (gimple_vdef (stmt
) && laststore
)
1004 gimple_set_vdef (laststore
, gimple_vdef (stmt
));
1005 if (TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
1006 SSA_NAME_DEF_STMT (gimple_vdef (stmt
)) = laststore
;
1007 update_stmt (laststore
);
1009 else if (gimple_in_ssa_p (cfun
))
1011 unlink_stmt_vdef (stmt
);
1012 release_defs (stmt
);
1020 gsi_insert_before (si_p
, last
, GSI_NEW_STMT
);
1023 if (laststore
&& is_gimple_reg (lhs
))
1025 gimple_set_vdef (laststore
, gimple_vdef (stmt
));
1026 update_stmt (laststore
);
1027 if (TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
1028 SSA_NAME_DEF_STMT (gimple_vdef (stmt
)) = laststore
;
1033 reaching_vuse
= make_ssa_name (gimple_vop (cfun
), laststore
);
1034 gimple_set_vdef (laststore
, reaching_vuse
);
1035 update_stmt (laststore
);
1038 new_stmt
= gimple_build_assign (lhs
, tmp
);
1039 if (!is_gimple_reg (tmp
))
1040 gimple_set_vuse (new_stmt
, reaching_vuse
);
1041 if (!is_gimple_reg (lhs
))
1043 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
1044 if (TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
1045 SSA_NAME_DEF_STMT (gimple_vdef (stmt
)) = new_stmt
;
1047 else if (reaching_vuse
== gimple_vuse (stmt
))
1048 unlink_stmt_vdef (stmt
);
1051 gimple_set_location (new_stmt
, gimple_location (stmt
));
1052 gsi_replace (si_p
, new_stmt
, false);
1055 /* Return the string length, maximum string length or maximum value of
1057 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1058 is not NULL and, for TYPE == 0, its value is not equal to the length
1059 we determine or if we are unable to determine the length or value,
1060 return false. VISITED is a bitmap of visited variables.
1061 TYPE is 0 if string length should be returned, 1 for maximum string
1062 length and 2 for maximum value ARG can have. */
1065 get_maxval_strlen (tree arg
, tree
*length
, bitmap visited
, int type
)
1070 if (TREE_CODE (arg
) != SSA_NAME
)
1072 if (TREE_CODE (arg
) == COND_EXPR
)
1073 return get_maxval_strlen (COND_EXPR_THEN (arg
), length
, visited
, type
)
1074 && get_maxval_strlen (COND_EXPR_ELSE (arg
), length
, visited
, type
);
1075 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1076 else if (TREE_CODE (arg
) == ADDR_EXPR
1077 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ARRAY_REF
1078 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg
, 0), 1)))
1080 tree aop0
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
1081 if (TREE_CODE (aop0
) == INDIRECT_REF
1082 && TREE_CODE (TREE_OPERAND (aop0
, 0)) == SSA_NAME
)
1083 return get_maxval_strlen (TREE_OPERAND (aop0
, 0),
1084 length
, visited
, type
);
1090 if (TREE_CODE (val
) != INTEGER_CST
1091 || tree_int_cst_sgn (val
) < 0)
1095 val
= c_strlen (arg
, 1);
1103 if (TREE_CODE (*length
) != INTEGER_CST
1104 || TREE_CODE (val
) != INTEGER_CST
)
1107 if (tree_int_cst_lt (*length
, val
))
1111 else if (simple_cst_equal (val
, *length
) != 1)
1119 /* If we were already here, break the infinite cycle. */
1120 if (!bitmap_set_bit (visited
, SSA_NAME_VERSION (arg
)))
1124 def_stmt
= SSA_NAME_DEF_STMT (var
);
1126 switch (gimple_code (def_stmt
))
1129 /* The RHS of the statement defining VAR must either have a
1130 constant length or come from another SSA_NAME with a constant
1132 if (gimple_assign_single_p (def_stmt
)
1133 || gimple_assign_unary_nop_p (def_stmt
))
1135 tree rhs
= gimple_assign_rhs1 (def_stmt
);
1136 return get_maxval_strlen (rhs
, length
, visited
, type
);
1142 /* All the arguments of the PHI node must have the same constant
1146 for (i
= 0; i
< gimple_phi_num_args (def_stmt
); i
++)
1148 tree arg
= gimple_phi_arg (def_stmt
, i
)->def
;
1150 /* If this PHI has itself as an argument, we cannot
1151 determine the string length of this argument. However,
1152 if we can find a constant string length for the other
1153 PHI args then we can still be sure that this is a
1154 constant string length. So be optimistic and just
1155 continue with the next argument. */
1156 if (arg
== gimple_phi_result (def_stmt
))
1159 if (!get_maxval_strlen (arg
, length
, visited
, type
))
1171 /* Fold builtin call in statement STMT. Returns a simplified tree.
1172 We may return a non-constant expression, including another call
1173 to a different function and with different arguments, e.g.,
1174 substituting memcpy for strcpy when the string length is known.
1175 Note that some builtins expand into inline code that may not
1176 be valid in GIMPLE. Callers must take care. */
1179 gimple_fold_builtin (gimple stmt
)
1181 tree result
, val
[3];
1187 location_t loc
= gimple_location (stmt
);
1189 gcc_assert (is_gimple_call (stmt
));
1191 ignore
= (gimple_call_lhs (stmt
) == NULL
);
1193 /* First try the generic builtin folder. If that succeeds, return the
1195 result
= fold_call_stmt (stmt
, ignore
);
1199 STRIP_NOPS (result
);
1203 /* Ignore MD builtins. */
1204 callee
= gimple_call_fndecl (stmt
);
1205 if (DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_MD
)
1208 /* If the builtin could not be folded, and it has no argument list,
1210 nargs
= gimple_call_num_args (stmt
);
1214 /* Limit the work only for builtins we know how to simplify. */
1215 switch (DECL_FUNCTION_CODE (callee
))
1217 case BUILT_IN_STRLEN
:
1218 case BUILT_IN_FPUTS
:
1219 case BUILT_IN_FPUTS_UNLOCKED
:
1223 case BUILT_IN_STRCPY
:
1224 case BUILT_IN_STRNCPY
:
1228 case BUILT_IN_MEMCPY_CHK
:
1229 case BUILT_IN_MEMPCPY_CHK
:
1230 case BUILT_IN_MEMMOVE_CHK
:
1231 case BUILT_IN_MEMSET_CHK
:
1232 case BUILT_IN_STRNCPY_CHK
:
1236 case BUILT_IN_STRCPY_CHK
:
1237 case BUILT_IN_STPCPY_CHK
:
1241 case BUILT_IN_SNPRINTF_CHK
:
1242 case BUILT_IN_VSNPRINTF_CHK
:
1250 if (arg_idx
>= nargs
)
1253 /* Try to use the dataflow information gathered by the CCP process. */
1254 visited
= BITMAP_ALLOC (NULL
);
1255 bitmap_clear (visited
);
1257 memset (val
, 0, sizeof (val
));
1258 a
= gimple_call_arg (stmt
, arg_idx
);
1259 if (!get_maxval_strlen (a
, &val
[arg_idx
], visited
, type
))
1260 val
[arg_idx
] = NULL_TREE
;
1262 BITMAP_FREE (visited
);
1265 switch (DECL_FUNCTION_CODE (callee
))
1267 case BUILT_IN_STRLEN
:
1268 if (val
[0] && nargs
== 1)
1271 fold_convert (TREE_TYPE (gimple_call_lhs (stmt
)), val
[0]);
1273 /* If the result is not a valid gimple value, or not a cast
1274 of a valid gimple value, then we cannot use the result. */
1275 if (is_gimple_val (new_val
)
1276 || (is_gimple_cast (new_val
)
1277 && is_gimple_val (TREE_OPERAND (new_val
, 0))))
1282 case BUILT_IN_STRCPY
:
1283 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 2)
1284 result
= fold_builtin_strcpy (loc
, callee
,
1285 gimple_call_arg (stmt
, 0),
1286 gimple_call_arg (stmt
, 1),
1290 case BUILT_IN_STRNCPY
:
1291 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 3)
1292 result
= fold_builtin_strncpy (loc
, callee
,
1293 gimple_call_arg (stmt
, 0),
1294 gimple_call_arg (stmt
, 1),
1295 gimple_call_arg (stmt
, 2),
1299 case BUILT_IN_FPUTS
:
1301 result
= fold_builtin_fputs (loc
, gimple_call_arg (stmt
, 0),
1302 gimple_call_arg (stmt
, 1),
1303 ignore
, false, val
[0]);
1306 case BUILT_IN_FPUTS_UNLOCKED
:
1308 result
= fold_builtin_fputs (loc
, gimple_call_arg (stmt
, 0),
1309 gimple_call_arg (stmt
, 1),
1310 ignore
, true, val
[0]);
1313 case BUILT_IN_MEMCPY_CHK
:
1314 case BUILT_IN_MEMPCPY_CHK
:
1315 case BUILT_IN_MEMMOVE_CHK
:
1316 case BUILT_IN_MEMSET_CHK
:
1317 if (val
[2] && is_gimple_val (val
[2]) && nargs
== 4)
1318 result
= fold_builtin_memory_chk (loc
, callee
,
1319 gimple_call_arg (stmt
, 0),
1320 gimple_call_arg (stmt
, 1),
1321 gimple_call_arg (stmt
, 2),
1322 gimple_call_arg (stmt
, 3),
1324 DECL_FUNCTION_CODE (callee
));
1327 case BUILT_IN_STRCPY_CHK
:
1328 case BUILT_IN_STPCPY_CHK
:
1329 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 3)
1330 result
= fold_builtin_stxcpy_chk (loc
, callee
,
1331 gimple_call_arg (stmt
, 0),
1332 gimple_call_arg (stmt
, 1),
1333 gimple_call_arg (stmt
, 2),
1335 DECL_FUNCTION_CODE (callee
));
1338 case BUILT_IN_STRNCPY_CHK
:
1339 if (val
[2] && is_gimple_val (val
[2]) && nargs
== 4)
1340 result
= fold_builtin_strncpy_chk (loc
, gimple_call_arg (stmt
, 0),
1341 gimple_call_arg (stmt
, 1),
1342 gimple_call_arg (stmt
, 2),
1343 gimple_call_arg (stmt
, 3),
1347 case BUILT_IN_SNPRINTF_CHK
:
1348 case BUILT_IN_VSNPRINTF_CHK
:
1349 if (val
[1] && is_gimple_val (val
[1]))
1350 result
= gimple_fold_builtin_snprintf_chk (stmt
, val
[1],
1351 DECL_FUNCTION_CODE (callee
));
1358 if (result
&& ignore
)
1359 result
= fold_ignored_result (result
);
1363 /* Return the first of the base binfos of BINFO that has virtual functions. */
1366 get_first_base_binfo_with_virtuals (tree binfo
)
1371 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
1372 if (BINFO_VIRTUALS (base_binfo
))
1379 /* Search for a base binfo of BINFO that corresponds to TYPE and return it if
1380 it is found or NULL_TREE if it is not. */
1383 get_base_binfo_for_type (tree binfo
, tree type
)
1387 tree res
= NULL_TREE
;
1389 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
1390 if (TREE_TYPE (base_binfo
) == type
)
1399 /* Return a binfo describing the part of object referenced by expression REF.
1400 Return NULL_TREE if it cannot be determined. REF can consist of a series of
1401 COMPONENT_REFs of a declaration or of an INDIRECT_REF or it can also be just
1402 a simple declaration, indirect reference or an SSA_NAME. If the function
1403 discovers an INDIRECT_REF or an SSA_NAME, it will assume that the
1404 encapsulating type is described by KNOWN_BINFO, if it is not NULL_TREE.
1405 Otherwise the first non-artificial field declaration or the base declaration
1406 will be examined to get the encapsulating type. */
1409 gimple_get_relevant_ref_binfo (tree ref
, tree known_binfo
)
1413 if (TREE_CODE (ref
) == COMPONENT_REF
)
1416 tree binfo
, base_binfo
;
1417 tree field
= TREE_OPERAND (ref
, 1);
1419 if (!DECL_ARTIFICIAL (field
))
1421 tree type
= TREE_TYPE (field
);
1422 if (TREE_CODE (type
) == RECORD_TYPE
)
1423 return TYPE_BINFO (type
);
1428 par_type
= TREE_TYPE (TREE_OPERAND (ref
, 0));
1429 binfo
= TYPE_BINFO (par_type
);
1431 || BINFO_N_BASE_BINFOS (binfo
) == 0)
1434 base_binfo
= get_first_base_binfo_with_virtuals (binfo
);
1435 if (base_binfo
&& BINFO_TYPE (base_binfo
) != TREE_TYPE (field
))
1439 d_binfo
= gimple_get_relevant_ref_binfo (TREE_OPERAND (ref
, 0),
1441 /* Get descendant binfo. */
1444 return get_base_binfo_for_type (d_binfo
, TREE_TYPE (field
));
1447 ref
= TREE_OPERAND (ref
, 0);
1449 else if (DECL_P (ref
) && TREE_CODE (TREE_TYPE (ref
)) == RECORD_TYPE
)
1450 return TYPE_BINFO (TREE_TYPE (ref
));
1451 else if (known_binfo
1452 && (TREE_CODE (ref
) == SSA_NAME
1453 || TREE_CODE (ref
) == MEM_REF
))
1460 /* Fold a OBJ_TYPE_REF expression to the address of a function. TOKEN is
1461 integer form of OBJ_TYPE_REF_TOKEN of the reference expression. KNOWN_BINFO
1462 carries the binfo describing the true type of OBJ_TYPE_REF_OBJECT(REF). */
1465 gimple_fold_obj_type_ref_known_binfo (HOST_WIDE_INT token
, tree known_binfo
)
1468 tree v
, fndecl
, delta
;
1470 v
= BINFO_VIRTUALS (known_binfo
);
1474 i
+= (TARGET_VTABLE_USES_DESCRIPTORS
1475 ? TARGET_VTABLE_USES_DESCRIPTORS
: 1);
1479 fndecl
= TREE_VALUE (v
);
1480 delta
= TREE_PURPOSE (v
);
1481 gcc_assert (host_integerp (delta
, 0));
1483 if (integer_nonzerop (delta
))
1485 struct cgraph_node
*node
= cgraph_get_node (fndecl
);
1486 HOST_WIDE_INT off
= tree_low_cst (delta
, 0);
1490 for (node
= node
->same_body
; node
; node
= node
->next
)
1491 if (node
->thunk
.thunk_p
&& off
== node
->thunk
.fixed_offset
)
1494 fndecl
= node
->decl
;
1499 /* When cgraph node is missing and function is not public, we cannot
1500 devirtualize. This can happen in WHOPR when the actual method
1501 ends up in other partition, because we found devirtualization
1502 possibility too late. */
1503 if (!can_refer_decl_in_current_unit_p (fndecl
))
1505 return build_fold_addr_expr (fndecl
);
1509 /* Fold a OBJ_TYPE_REF expression to the address of a function. If KNOWN_TYPE
1510 is not NULL_TREE, it is the true type of the outmost encapsulating object if
1511 that comes from a pointer SSA_NAME. If the true outmost encapsulating type
1512 can be determined from a declaration OBJ_TYPE_REF_OBJECT(REF), it is used
1513 regardless of KNOWN_TYPE (which thus can be NULL_TREE). */
1516 gimple_fold_obj_type_ref (tree ref
, tree known_type
)
1518 tree obj
= OBJ_TYPE_REF_OBJECT (ref
);
1519 tree known_binfo
= known_type
? TYPE_BINFO (known_type
) : NULL_TREE
;
1522 if (TREE_CODE (obj
) == ADDR_EXPR
)
1523 obj
= TREE_OPERAND (obj
, 0);
1525 binfo
= gimple_get_relevant_ref_binfo (obj
, known_binfo
);
1528 HOST_WIDE_INT token
= tree_low_cst (OBJ_TYPE_REF_TOKEN (ref
), 1);
1529 /* If there is no virtual methods leave the OBJ_TYPE_REF alone. */
1530 if (!BINFO_VIRTUALS (binfo
))
1532 return gimple_fold_obj_type_ref_known_binfo (token
, binfo
);
1538 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1539 The statement may be replaced by another statement, e.g., if the call
1540 simplifies to a constant value. Return true if any changes were made.
1541 It is assumed that the operands have been previously folded. */
1544 fold_gimple_call (gimple_stmt_iterator
*gsi
, bool inplace
)
1546 gimple stmt
= gsi_stmt (*gsi
);
1548 tree callee
= gimple_call_fndecl (stmt
);
1550 /* Check for builtins that CCP can handle using information not
1551 available in the generic fold routines. */
1552 if (!inplace
&& callee
&& DECL_BUILT_IN (callee
))
1554 tree result
= gimple_fold_builtin (stmt
);
1558 if (!update_call_from_tree (gsi
, result
))
1559 gimplify_and_update_call_from_tree (gsi
, result
);
1565 /* ??? Should perhaps do this in fold proper. However, doing it
1566 there requires that we create a new CALL_EXPR, and that requires
1567 copying EH region info to the new node. Easier to just do it
1568 here where we can just smash the call operand. */
1569 callee
= gimple_call_fn (stmt
);
1570 if (TREE_CODE (callee
) == OBJ_TYPE_REF
1571 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee
)) == ADDR_EXPR
)
1575 t
= gimple_fold_obj_type_ref (callee
, NULL_TREE
);
1578 gimple_call_set_fn (stmt
, t
);
1587 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1588 distinguishes both cases. */
1591 fold_stmt_1 (gimple_stmt_iterator
*gsi
, bool inplace
)
1593 bool changed
= false;
1594 gimple stmt
= gsi_stmt (*gsi
);
1597 /* Fold the main computation performed by the statement. */
1598 switch (gimple_code (stmt
))
1602 unsigned old_num_ops
= gimple_num_ops (stmt
);
1603 tree new_rhs
= fold_gimple_assign (gsi
);
1604 tree lhs
= gimple_assign_lhs (stmt
);
1606 && !useless_type_conversion_p (TREE_TYPE (lhs
),
1607 TREE_TYPE (new_rhs
)))
1608 new_rhs
= fold_convert (TREE_TYPE (lhs
), new_rhs
);
1611 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs
)) < old_num_ops
))
1613 gimple_assign_set_rhs_from_tree (gsi
, new_rhs
);
1620 changed
|= fold_gimple_cond (stmt
);
1624 /* Fold *& in call arguments. */
1625 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
1626 if (REFERENCE_CLASS_P (gimple_call_arg (stmt
, i
)))
1628 tree tmp
= maybe_fold_reference (gimple_call_arg (stmt
, i
), false);
1631 gimple_call_set_arg (stmt
, i
, tmp
);
1635 changed
|= fold_gimple_call (gsi
, inplace
);
1639 /* Fold *& in asm operands. */
1640 for (i
= 0; i
< gimple_asm_noutputs (stmt
); ++i
)
1642 tree link
= gimple_asm_output_op (stmt
, i
);
1643 tree op
= TREE_VALUE (link
);
1644 if (REFERENCE_CLASS_P (op
)
1645 && (op
= maybe_fold_reference (op
, true)) != NULL_TREE
)
1647 TREE_VALUE (link
) = op
;
1651 for (i
= 0; i
< gimple_asm_ninputs (stmt
); ++i
)
1653 tree link
= gimple_asm_input_op (stmt
, i
);
1654 tree op
= TREE_VALUE (link
);
1655 if (REFERENCE_CLASS_P (op
)
1656 && (op
= maybe_fold_reference (op
, false)) != NULL_TREE
)
1658 TREE_VALUE (link
) = op
;
1665 if (gimple_debug_bind_p (stmt
))
1667 tree val
= gimple_debug_bind_get_value (stmt
);
1669 && REFERENCE_CLASS_P (val
))
1671 tree tem
= maybe_fold_reference (val
, false);
1674 gimple_debug_bind_set_value (stmt
, tem
);
1684 stmt
= gsi_stmt (*gsi
);
1686 /* Fold *& on the lhs. */
1687 if (gimple_has_lhs (stmt
))
1689 tree lhs
= gimple_get_lhs (stmt
);
1690 if (lhs
&& REFERENCE_CLASS_P (lhs
))
1692 tree new_lhs
= maybe_fold_reference (lhs
, true);
1695 gimple_set_lhs (stmt
, new_lhs
);
1704 /* Fold the statement pointed to by GSI. In some cases, this function may
1705 replace the whole statement with a new one. Returns true iff folding
1707 The statement pointed to by GSI should be in valid gimple form but may
1708 be in unfolded state as resulting from for example constant propagation
1709 which can produce *&x = 0. */
1712 fold_stmt (gimple_stmt_iterator
*gsi
)
1714 return fold_stmt_1 (gsi
, false);
1717 /* Perform the minimal folding on statement STMT. Only operations like
1718 *&x created by constant propagation are handled. The statement cannot
1719 be replaced with a new one. Return true if the statement was
1720 changed, false otherwise.
1721 The statement STMT should be in valid gimple form but may
1722 be in unfolded state as resulting from for example constant propagation
1723 which can produce *&x = 0. */
1726 fold_stmt_inplace (gimple stmt
)
1728 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
1729 bool changed
= fold_stmt_1 (&gsi
, true);
1730 gcc_assert (gsi_stmt (gsi
) == stmt
);
1734 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1735 if EXPR is null or we don't know how.
1736 If non-null, the result always has boolean type. */
1739 canonicalize_bool (tree expr
, bool invert
)
1745 if (integer_nonzerop (expr
))
1746 return boolean_false_node
;
1747 else if (integer_zerop (expr
))
1748 return boolean_true_node
;
1749 else if (TREE_CODE (expr
) == SSA_NAME
)
1750 return fold_build2 (EQ_EXPR
, boolean_type_node
, expr
,
1751 build_int_cst (TREE_TYPE (expr
), 0));
1752 else if (TREE_CODE_CLASS (TREE_CODE (expr
)) == tcc_comparison
)
1753 return fold_build2 (invert_tree_comparison (TREE_CODE (expr
), false),
1755 TREE_OPERAND (expr
, 0),
1756 TREE_OPERAND (expr
, 1));
1762 if (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
1764 if (integer_nonzerop (expr
))
1765 return boolean_true_node
;
1766 else if (integer_zerop (expr
))
1767 return boolean_false_node
;
1768 else if (TREE_CODE (expr
) == SSA_NAME
)
1769 return fold_build2 (NE_EXPR
, boolean_type_node
, expr
,
1770 build_int_cst (TREE_TYPE (expr
), 0));
1771 else if (TREE_CODE_CLASS (TREE_CODE (expr
)) == tcc_comparison
)
1772 return fold_build2 (TREE_CODE (expr
),
1774 TREE_OPERAND (expr
, 0),
1775 TREE_OPERAND (expr
, 1));
1781 /* Check to see if a boolean expression EXPR is logically equivalent to the
1782 comparison (OP1 CODE OP2). Check for various identities involving
1786 same_bool_comparison_p (const_tree expr
, enum tree_code code
,
1787 const_tree op1
, const_tree op2
)
1791 /* The obvious case. */
1792 if (TREE_CODE (expr
) == code
1793 && operand_equal_p (TREE_OPERAND (expr
, 0), op1
, 0)
1794 && operand_equal_p (TREE_OPERAND (expr
, 1), op2
, 0))
1797 /* Check for comparing (name, name != 0) and the case where expr
1798 is an SSA_NAME with a definition matching the comparison. */
1799 if (TREE_CODE (expr
) == SSA_NAME
1800 && TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
1802 if (operand_equal_p (expr
, op1
, 0))
1803 return ((code
== NE_EXPR
&& integer_zerop (op2
))
1804 || (code
== EQ_EXPR
&& integer_nonzerop (op2
)));
1805 s
= SSA_NAME_DEF_STMT (expr
);
1806 if (is_gimple_assign (s
)
1807 && gimple_assign_rhs_code (s
) == code
1808 && operand_equal_p (gimple_assign_rhs1 (s
), op1
, 0)
1809 && operand_equal_p (gimple_assign_rhs2 (s
), op2
, 0))
1813 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1814 of name is a comparison, recurse. */
1815 if (TREE_CODE (op1
) == SSA_NAME
1816 && TREE_CODE (TREE_TYPE (op1
)) == BOOLEAN_TYPE
)
1818 s
= SSA_NAME_DEF_STMT (op1
);
1819 if (is_gimple_assign (s
)
1820 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
)
1822 enum tree_code c
= gimple_assign_rhs_code (s
);
1823 if ((c
== NE_EXPR
&& integer_zerop (op2
))
1824 || (c
== EQ_EXPR
&& integer_nonzerop (op2
)))
1825 return same_bool_comparison_p (expr
, c
,
1826 gimple_assign_rhs1 (s
),
1827 gimple_assign_rhs2 (s
));
1828 if ((c
== EQ_EXPR
&& integer_zerop (op2
))
1829 || (c
== NE_EXPR
&& integer_nonzerop (op2
)))
1830 return same_bool_comparison_p (expr
,
1831 invert_tree_comparison (c
, false),
1832 gimple_assign_rhs1 (s
),
1833 gimple_assign_rhs2 (s
));
1839 /* Check to see if two boolean expressions OP1 and OP2 are logically
1843 same_bool_result_p (const_tree op1
, const_tree op2
)
1845 /* Simple cases first. */
1846 if (operand_equal_p (op1
, op2
, 0))
1849 /* Check the cases where at least one of the operands is a comparison.
1850 These are a bit smarter than operand_equal_p in that they apply some
1851 identifies on SSA_NAMEs. */
1852 if (TREE_CODE_CLASS (TREE_CODE (op2
)) == tcc_comparison
1853 && same_bool_comparison_p (op1
, TREE_CODE (op2
),
1854 TREE_OPERAND (op2
, 0),
1855 TREE_OPERAND (op2
, 1)))
1857 if (TREE_CODE_CLASS (TREE_CODE (op1
)) == tcc_comparison
1858 && same_bool_comparison_p (op2
, TREE_CODE (op1
),
1859 TREE_OPERAND (op1
, 0),
1860 TREE_OPERAND (op1
, 1)))
1867 /* Forward declarations for some mutually recursive functions. */
1870 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
1871 enum tree_code code2
, tree op2a
, tree op2b
);
1873 and_var_with_comparison (tree var
, bool invert
,
1874 enum tree_code code2
, tree op2a
, tree op2b
);
1876 and_var_with_comparison_1 (gimple stmt
,
1877 enum tree_code code2
, tree op2a
, tree op2b
);
1879 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
1880 enum tree_code code2
, tree op2a
, tree op2b
);
1882 or_var_with_comparison (tree var
, bool invert
,
1883 enum tree_code code2
, tree op2a
, tree op2b
);
1885 or_var_with_comparison_1 (gimple stmt
,
1886 enum tree_code code2
, tree op2a
, tree op2b
);
1888 /* Helper function for and_comparisons_1: try to simplify the AND of the
1889 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1890 If INVERT is true, invert the value of the VAR before doing the AND.
1891 Return NULL_EXPR if we can't simplify this to a single expression. */
1894 and_var_with_comparison (tree var
, bool invert
,
1895 enum tree_code code2
, tree op2a
, tree op2b
)
1898 gimple stmt
= SSA_NAME_DEF_STMT (var
);
1900 /* We can only deal with variables whose definitions are assignments. */
1901 if (!is_gimple_assign (stmt
))
1904 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1905 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1906 Then we only have to consider the simpler non-inverted cases. */
1908 t
= or_var_with_comparison_1 (stmt
,
1909 invert_tree_comparison (code2
, false),
1912 t
= and_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
1913 return canonicalize_bool (t
, invert
);
1916 /* Try to simplify the AND of the ssa variable defined by the assignment
1917 STMT with the comparison specified by (OP2A CODE2 OP2B).
1918 Return NULL_EXPR if we can't simplify this to a single expression. */
1921 and_var_with_comparison_1 (gimple stmt
,
1922 enum tree_code code2
, tree op2a
, tree op2b
)
1924 tree var
= gimple_assign_lhs (stmt
);
1925 tree true_test_var
= NULL_TREE
;
1926 tree false_test_var
= NULL_TREE
;
1927 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
1929 /* Check for identities like (var AND (var == 0)) => false. */
1930 if (TREE_CODE (op2a
) == SSA_NAME
1931 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
1933 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
1934 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
1936 true_test_var
= op2a
;
1937 if (var
== true_test_var
)
1940 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
1941 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
1943 false_test_var
= op2a
;
1944 if (var
== false_test_var
)
1945 return boolean_false_node
;
1949 /* If the definition is a comparison, recurse on it. */
1950 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
1952 tree t
= and_comparisons_1 (innercode
,
1953 gimple_assign_rhs1 (stmt
),
1954 gimple_assign_rhs2 (stmt
),
1962 /* If the definition is an AND or OR expression, we may be able to
1963 simplify by reassociating. */
1964 if (innercode
== TRUTH_AND_EXPR
1965 || innercode
== TRUTH_OR_EXPR
1966 || (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
1967 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
)))
1969 tree inner1
= gimple_assign_rhs1 (stmt
);
1970 tree inner2
= gimple_assign_rhs2 (stmt
);
1973 tree partial
= NULL_TREE
;
1974 bool is_and
= (innercode
== TRUTH_AND_EXPR
|| innercode
== BIT_AND_EXPR
);
1976 /* Check for boolean identities that don't require recursive examination
1978 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1979 inner1 AND (inner1 OR inner2) => inner1
1980 !inner1 AND (inner1 AND inner2) => false
1981 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1982 Likewise for similar cases involving inner2. */
1983 if (inner1
== true_test_var
)
1984 return (is_and
? var
: inner1
);
1985 else if (inner2
== true_test_var
)
1986 return (is_and
? var
: inner2
);
1987 else if (inner1
== false_test_var
)
1989 ? boolean_false_node
1990 : and_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
1991 else if (inner2
== false_test_var
)
1993 ? boolean_false_node
1994 : and_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
1996 /* Next, redistribute/reassociate the AND across the inner tests.
1997 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1998 if (TREE_CODE (inner1
) == SSA_NAME
1999 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
2000 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2001 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
2002 gimple_assign_rhs1 (s
),
2003 gimple_assign_rhs2 (s
),
2004 code2
, op2a
, op2b
)))
2006 /* Handle the AND case, where we are reassociating:
2007 (inner1 AND inner2) AND (op2a code2 op2b)
2009 If the partial result t is a constant, we win. Otherwise
2010 continue on to try reassociating with the other inner test. */
2013 if (integer_onep (t
))
2015 else if (integer_zerop (t
))
2016 return boolean_false_node
;
2019 /* Handle the OR case, where we are redistributing:
2020 (inner1 OR inner2) AND (op2a code2 op2b)
2021 => (t OR (inner2 AND (op2a code2 op2b))) */
2024 if (integer_onep (t
))
2025 return boolean_true_node
;
2027 /* Save partial result for later. */
2032 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
2033 if (TREE_CODE (inner2
) == SSA_NAME
2034 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
2035 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2036 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
2037 gimple_assign_rhs1 (s
),
2038 gimple_assign_rhs2 (s
),
2039 code2
, op2a
, op2b
)))
2041 /* Handle the AND case, where we are reassociating:
2042 (inner1 AND inner2) AND (op2a code2 op2b)
2043 => (inner1 AND t) */
2046 if (integer_onep (t
))
2048 else if (integer_zerop (t
))
2049 return boolean_false_node
;
2052 /* Handle the OR case. where we are redistributing:
2053 (inner1 OR inner2) AND (op2a code2 op2b)
2054 => (t OR (inner1 AND (op2a code2 op2b)))
2055 => (t OR partial) */
2058 if (integer_onep (t
))
2059 return boolean_true_node
;
2062 /* We already got a simplification for the other
2063 operand to the redistributed OR expression. The
2064 interesting case is when at least one is false.
2065 Or, if both are the same, we can apply the identity
2067 if (integer_zerop (partial
))
2069 else if (integer_zerop (t
))
2071 else if (same_bool_result_p (t
, partial
))
2080 /* Try to simplify the AND of two comparisons defined by
2081 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2082 If this can be done without constructing an intermediate value,
2083 return the resulting tree; otherwise NULL_TREE is returned.
2084 This function is deliberately asymmetric as it recurses on SSA_DEFs
2085 in the first comparison but not the second. */
2088 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
2089 enum tree_code code2
, tree op2a
, tree op2b
)
2091 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
2092 if (operand_equal_p (op1a
, op2a
, 0)
2093 && operand_equal_p (op1b
, op2b
, 0))
2095 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2096 TRUTH_ANDIF_EXPR
, code1
, code2
,
2097 boolean_type_node
, op1a
, op1b
);
2102 /* Likewise the swapped case of the above. */
2103 if (operand_equal_p (op1a
, op2b
, 0)
2104 && operand_equal_p (op1b
, op2a
, 0))
2106 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2107 TRUTH_ANDIF_EXPR
, code1
,
2108 swap_tree_comparison (code2
),
2109 boolean_type_node
, op1a
, op1b
);
2114 /* If both comparisons are of the same value against constants, we might
2115 be able to merge them. */
2116 if (operand_equal_p (op1a
, op2a
, 0)
2117 && TREE_CODE (op1b
) == INTEGER_CST
2118 && TREE_CODE (op2b
) == INTEGER_CST
)
2120 int cmp
= tree_int_cst_compare (op1b
, op2b
);
2122 /* If we have (op1a == op1b), we should either be able to
2123 return that or FALSE, depending on whether the constant op1b
2124 also satisfies the other comparison against op2b. */
2125 if (code1
== EQ_EXPR
)
2131 case EQ_EXPR
: val
= (cmp
== 0); break;
2132 case NE_EXPR
: val
= (cmp
!= 0); break;
2133 case LT_EXPR
: val
= (cmp
< 0); break;
2134 case GT_EXPR
: val
= (cmp
> 0); break;
2135 case LE_EXPR
: val
= (cmp
<= 0); break;
2136 case GE_EXPR
: val
= (cmp
>= 0); break;
2137 default: done
= false;
2142 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2144 return boolean_false_node
;
2147 /* Likewise if the second comparison is an == comparison. */
2148 else if (code2
== EQ_EXPR
)
2154 case EQ_EXPR
: val
= (cmp
== 0); break;
2155 case NE_EXPR
: val
= (cmp
!= 0); break;
2156 case LT_EXPR
: val
= (cmp
> 0); break;
2157 case GT_EXPR
: val
= (cmp
< 0); break;
2158 case LE_EXPR
: val
= (cmp
>= 0); break;
2159 case GE_EXPR
: val
= (cmp
<= 0); break;
2160 default: done
= false;
2165 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2167 return boolean_false_node
;
2171 /* Same business with inequality tests. */
2172 else if (code1
== NE_EXPR
)
2177 case EQ_EXPR
: val
= (cmp
!= 0); break;
2178 case NE_EXPR
: val
= (cmp
== 0); break;
2179 case LT_EXPR
: val
= (cmp
>= 0); break;
2180 case GT_EXPR
: val
= (cmp
<= 0); break;
2181 case LE_EXPR
: val
= (cmp
> 0); break;
2182 case GE_EXPR
: val
= (cmp
< 0); break;
2187 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2189 else if (code2
== NE_EXPR
)
2194 case EQ_EXPR
: val
= (cmp
== 0); break;
2195 case NE_EXPR
: val
= (cmp
!= 0); break;
2196 case LT_EXPR
: val
= (cmp
<= 0); break;
2197 case GT_EXPR
: val
= (cmp
>= 0); break;
2198 case LE_EXPR
: val
= (cmp
< 0); break;
2199 case GE_EXPR
: val
= (cmp
> 0); break;
2204 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2207 /* Chose the more restrictive of two < or <= comparisons. */
2208 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
2209 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
2211 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
2212 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2214 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2217 /* Likewise chose the more restrictive of two > or >= comparisons. */
2218 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
2219 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
2221 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
2222 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2224 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2227 /* Check for singleton ranges. */
2229 && ((code1
== LE_EXPR
&& code2
== GE_EXPR
)
2230 || (code1
== GE_EXPR
&& code2
== LE_EXPR
)))
2231 return fold_build2 (EQ_EXPR
, boolean_type_node
, op1a
, op2b
);
2233 /* Check for disjoint ranges. */
2235 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
2236 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
2237 return boolean_false_node
;
2239 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
2240 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
2241 return boolean_false_node
;
2244 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2245 NAME's definition is a truth value. See if there are any simplifications
2246 that can be done against the NAME's definition. */
2247 if (TREE_CODE (op1a
) == SSA_NAME
2248 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
2249 && (integer_zerop (op1b
) || integer_onep (op1b
)))
2251 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
2252 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
2253 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
2254 switch (gimple_code (stmt
))
2257 /* Try to simplify by copy-propagating the definition. */
2258 return and_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
2261 /* If every argument to the PHI produces the same result when
2262 ANDed with the second comparison, we win.
2263 Do not do this unless the type is bool since we need a bool
2264 result here anyway. */
2265 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
2267 tree result
= NULL_TREE
;
2269 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
2271 tree arg
= gimple_phi_arg_def (stmt
, i
);
2273 /* If this PHI has itself as an argument, ignore it.
2274 If all the other args produce the same result,
2276 if (arg
== gimple_phi_result (stmt
))
2278 else if (TREE_CODE (arg
) == INTEGER_CST
)
2280 if (invert
? integer_nonzerop (arg
) : integer_zerop (arg
))
2283 result
= boolean_false_node
;
2284 else if (!integer_zerop (result
))
2288 result
= fold_build2 (code2
, boolean_type_node
,
2290 else if (!same_bool_comparison_p (result
,
2294 else if (TREE_CODE (arg
) == SSA_NAME
)
2296 tree temp
= and_var_with_comparison (arg
, invert
,
2302 else if (!same_bool_result_p (result
, temp
))
2318 /* Try to simplify the AND of two comparisons, specified by
2319 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2320 If this can be simplified to a single expression (without requiring
2321 introducing more SSA variables to hold intermediate values),
2322 return the resulting tree. Otherwise return NULL_TREE.
2323 If the result expression is non-null, it has boolean type. */
2326 maybe_fold_and_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
2327 enum tree_code code2
, tree op2a
, tree op2b
)
2329 tree t
= and_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
2333 return and_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
2336 /* Helper function for or_comparisons_1: try to simplify the OR of the
2337 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2338 If INVERT is true, invert the value of VAR before doing the OR.
2339 Return NULL_EXPR if we can't simplify this to a single expression. */
2342 or_var_with_comparison (tree var
, bool invert
,
2343 enum tree_code code2
, tree op2a
, tree op2b
)
2346 gimple stmt
= SSA_NAME_DEF_STMT (var
);
2348 /* We can only deal with variables whose definitions are assignments. */
2349 if (!is_gimple_assign (stmt
))
2352 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2353 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2354 Then we only have to consider the simpler non-inverted cases. */
2356 t
= and_var_with_comparison_1 (stmt
,
2357 invert_tree_comparison (code2
, false),
2360 t
= or_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
2361 return canonicalize_bool (t
, invert
);
2364 /* Try to simplify the OR of the ssa variable defined by the assignment
2365 STMT with the comparison specified by (OP2A CODE2 OP2B).
2366 Return NULL_EXPR if we can't simplify this to a single expression. */
2369 or_var_with_comparison_1 (gimple stmt
,
2370 enum tree_code code2
, tree op2a
, tree op2b
)
2372 tree var
= gimple_assign_lhs (stmt
);
2373 tree true_test_var
= NULL_TREE
;
2374 tree false_test_var
= NULL_TREE
;
2375 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
2377 /* Check for identities like (var OR (var != 0)) => true . */
2378 if (TREE_CODE (op2a
) == SSA_NAME
2379 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
2381 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
2382 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
2384 true_test_var
= op2a
;
2385 if (var
== true_test_var
)
2388 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
2389 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
2391 false_test_var
= op2a
;
2392 if (var
== false_test_var
)
2393 return boolean_true_node
;
2397 /* If the definition is a comparison, recurse on it. */
2398 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
2400 tree t
= or_comparisons_1 (innercode
,
2401 gimple_assign_rhs1 (stmt
),
2402 gimple_assign_rhs2 (stmt
),
2410 /* If the definition is an AND or OR expression, we may be able to
2411 simplify by reassociating. */
2412 if (innercode
== TRUTH_AND_EXPR
2413 || innercode
== TRUTH_OR_EXPR
2414 || (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
2415 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
)))
2417 tree inner1
= gimple_assign_rhs1 (stmt
);
2418 tree inner2
= gimple_assign_rhs2 (stmt
);
2421 tree partial
= NULL_TREE
;
2422 bool is_or
= (innercode
== TRUTH_OR_EXPR
|| innercode
== BIT_IOR_EXPR
);
2424 /* Check for boolean identities that don't require recursive examination
2426 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2427 inner1 OR (inner1 AND inner2) => inner1
2428 !inner1 OR (inner1 OR inner2) => true
2429 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2431 if (inner1
== true_test_var
)
2432 return (is_or
? var
: inner1
);
2433 else if (inner2
== true_test_var
)
2434 return (is_or
? var
: inner2
);
2435 else if (inner1
== false_test_var
)
2438 : or_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
2439 else if (inner2
== false_test_var
)
2442 : or_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
2444 /* Next, redistribute/reassociate the OR across the inner tests.
2445 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2446 if (TREE_CODE (inner1
) == SSA_NAME
2447 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
2448 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2449 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
2450 gimple_assign_rhs1 (s
),
2451 gimple_assign_rhs2 (s
),
2452 code2
, op2a
, op2b
)))
2454 /* Handle the OR case, where we are reassociating:
2455 (inner1 OR inner2) OR (op2a code2 op2b)
2457 If the partial result t is a constant, we win. Otherwise
2458 continue on to try reassociating with the other inner test. */
2459 if (innercode
== TRUTH_OR_EXPR
)
2461 if (integer_onep (t
))
2462 return boolean_true_node
;
2463 else if (integer_zerop (t
))
2467 /* Handle the AND case, where we are redistributing:
2468 (inner1 AND inner2) OR (op2a code2 op2b)
2469 => (t AND (inner2 OR (op2a code op2b))) */
2472 if (integer_zerop (t
))
2473 return boolean_false_node
;
2475 /* Save partial result for later. */
2480 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2481 if (TREE_CODE (inner2
) == SSA_NAME
2482 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
2483 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2484 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
2485 gimple_assign_rhs1 (s
),
2486 gimple_assign_rhs2 (s
),
2487 code2
, op2a
, op2b
)))
2489 /* Handle the OR case, where we are reassociating:
2490 (inner1 OR inner2) OR (op2a code2 op2b)
2492 if (innercode
== TRUTH_OR_EXPR
)
2494 if (integer_zerop (t
))
2496 else if (integer_onep (t
))
2497 return boolean_true_node
;
2500 /* Handle the AND case, where we are redistributing:
2501 (inner1 AND inner2) OR (op2a code2 op2b)
2502 => (t AND (inner1 OR (op2a code2 op2b)))
2503 => (t AND partial) */
2506 if (integer_zerop (t
))
2507 return boolean_false_node
;
2510 /* We already got a simplification for the other
2511 operand to the redistributed AND expression. The
2512 interesting case is when at least one is true.
2513 Or, if both are the same, we can apply the identity
2514 (x AND x) == true. */
2515 if (integer_onep (partial
))
2517 else if (integer_onep (t
))
2519 else if (same_bool_result_p (t
, partial
))
2520 return boolean_true_node
;
2528 /* Try to simplify the OR of two comparisons defined by
2529 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2530 If this can be done without constructing an intermediate value,
2531 return the resulting tree; otherwise NULL_TREE is returned.
2532 This function is deliberately asymmetric as it recurses on SSA_DEFs
2533 in the first comparison but not the second. */
2536 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
2537 enum tree_code code2
, tree op2a
, tree op2b
)
2539 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2540 if (operand_equal_p (op1a
, op2a
, 0)
2541 && operand_equal_p (op1b
, op2b
, 0))
2543 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2544 TRUTH_ORIF_EXPR
, code1
, code2
,
2545 boolean_type_node
, op1a
, op1b
);
2550 /* Likewise the swapped case of the above. */
2551 if (operand_equal_p (op1a
, op2b
, 0)
2552 && operand_equal_p (op1b
, op2a
, 0))
2554 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2555 TRUTH_ORIF_EXPR
, code1
,
2556 swap_tree_comparison (code2
),
2557 boolean_type_node
, op1a
, op1b
);
2562 /* If both comparisons are of the same value against constants, we might
2563 be able to merge them. */
2564 if (operand_equal_p (op1a
, op2a
, 0)
2565 && TREE_CODE (op1b
) == INTEGER_CST
2566 && TREE_CODE (op2b
) == INTEGER_CST
)
2568 int cmp
= tree_int_cst_compare (op1b
, op2b
);
2570 /* If we have (op1a != op1b), we should either be able to
2571 return that or TRUE, depending on whether the constant op1b
2572 also satisfies the other comparison against op2b. */
2573 if (code1
== NE_EXPR
)
2579 case EQ_EXPR
: val
= (cmp
== 0); break;
2580 case NE_EXPR
: val
= (cmp
!= 0); break;
2581 case LT_EXPR
: val
= (cmp
< 0); break;
2582 case GT_EXPR
: val
= (cmp
> 0); break;
2583 case LE_EXPR
: val
= (cmp
<= 0); break;
2584 case GE_EXPR
: val
= (cmp
>= 0); break;
2585 default: done
= false;
2590 return boolean_true_node
;
2592 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2595 /* Likewise if the second comparison is a != comparison. */
2596 else if (code2
== NE_EXPR
)
2602 case EQ_EXPR
: val
= (cmp
== 0); break;
2603 case NE_EXPR
: val
= (cmp
!= 0); break;
2604 case LT_EXPR
: val
= (cmp
> 0); break;
2605 case GT_EXPR
: val
= (cmp
< 0); break;
2606 case LE_EXPR
: val
= (cmp
>= 0); break;
2607 case GE_EXPR
: val
= (cmp
<= 0); break;
2608 default: done
= false;
2613 return boolean_true_node
;
2615 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2619 /* See if an equality test is redundant with the other comparison. */
2620 else if (code1
== EQ_EXPR
)
2625 case EQ_EXPR
: val
= (cmp
== 0); break;
2626 case NE_EXPR
: val
= (cmp
!= 0); break;
2627 case LT_EXPR
: val
= (cmp
< 0); break;
2628 case GT_EXPR
: val
= (cmp
> 0); break;
2629 case LE_EXPR
: val
= (cmp
<= 0); break;
2630 case GE_EXPR
: val
= (cmp
>= 0); break;
2635 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2637 else if (code2
== EQ_EXPR
)
2642 case EQ_EXPR
: val
= (cmp
== 0); break;
2643 case NE_EXPR
: val
= (cmp
!= 0); break;
2644 case LT_EXPR
: val
= (cmp
> 0); break;
2645 case GT_EXPR
: val
= (cmp
< 0); break;
2646 case LE_EXPR
: val
= (cmp
>= 0); break;
2647 case GE_EXPR
: val
= (cmp
<= 0); break;
2652 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2655 /* Chose the less restrictive of two < or <= comparisons. */
2656 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
2657 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
2659 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
2660 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2662 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2665 /* Likewise chose the less restrictive of two > or >= comparisons. */
2666 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
2667 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
2669 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
2670 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2672 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2675 /* Check for singleton ranges. */
2677 && ((code1
== LT_EXPR
&& code2
== GT_EXPR
)
2678 || (code1
== GT_EXPR
&& code2
== LT_EXPR
)))
2679 return fold_build2 (NE_EXPR
, boolean_type_node
, op1a
, op2b
);
2681 /* Check for less/greater pairs that don't restrict the range at all. */
2683 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
2684 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
2685 return boolean_true_node
;
2687 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
2688 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
2689 return boolean_true_node
;
2692 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2693 NAME's definition is a truth value. See if there are any simplifications
2694 that can be done against the NAME's definition. */
2695 if (TREE_CODE (op1a
) == SSA_NAME
2696 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
2697 && (integer_zerop (op1b
) || integer_onep (op1b
)))
2699 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
2700 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
2701 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
2702 switch (gimple_code (stmt
))
2705 /* Try to simplify by copy-propagating the definition. */
2706 return or_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
2709 /* If every argument to the PHI produces the same result when
2710 ORed with the second comparison, we win.
2711 Do not do this unless the type is bool since we need a bool
2712 result here anyway. */
2713 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
2715 tree result
= NULL_TREE
;
2717 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
2719 tree arg
= gimple_phi_arg_def (stmt
, i
);
2721 /* If this PHI has itself as an argument, ignore it.
2722 If all the other args produce the same result,
2724 if (arg
== gimple_phi_result (stmt
))
2726 else if (TREE_CODE (arg
) == INTEGER_CST
)
2728 if (invert
? integer_zerop (arg
) : integer_nonzerop (arg
))
2731 result
= boolean_true_node
;
2732 else if (!integer_onep (result
))
2736 result
= fold_build2 (code2
, boolean_type_node
,
2738 else if (!same_bool_comparison_p (result
,
2742 else if (TREE_CODE (arg
) == SSA_NAME
)
2744 tree temp
= or_var_with_comparison (arg
, invert
,
2750 else if (!same_bool_result_p (result
, temp
))
2766 /* Try to simplify the OR of two comparisons, specified by
2767 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2768 If this can be simplified to a single expression (without requiring
2769 introducing more SSA variables to hold intermediate values),
2770 return the resulting tree. Otherwise return NULL_TREE.
2771 If the result expression is non-null, it has boolean type. */
2774 maybe_fold_or_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
2775 enum tree_code code2
, tree op2a
, tree op2b
)
2777 tree t
= or_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
2781 return or_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);