/* Statement simplification on GIMPLE.
   Copyright (C) 2010 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "target.h"
/* Return true when DECL can be referenced from the current unit.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to another compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
	In this case we have a static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred to by external vtables that
	we devirtualize only during the final compilation stage.
	At this time we have already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */
static bool
can_refer_decl_in_current_unit_p (tree decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;

  if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
    return true;
  /* The external flag is set, so we deal with a C++ reference
     to a static object from another file.  */
  if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
      && TREE_CODE (decl) == VAR_DECL)
    {
      /* Just be sure it is not a frontend bug setting the flags
	 incorrectly.  Those variables should never be finalized
	 in this compilation unit.  */
      gcc_checking_assert (!(vnode = varpool_get_node (decl))
			   || !vnode->finalized);
      return false;
    }
  /* When a function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also when still gimplifying all referred comdat functions will be
     produced.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;
  /* If we already output the function body, we are safe.  */
  if (TREE_ASM_WRITTEN (decl))
    return true;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have the function body and that we haven't
	 yet taken the decision to eliminate the offline copy of the
	 function.  The second point is important when devirtualization
	 happens during the final compilation stage, when making a new
	 reference no longer causes the callee to be compiled.  */
      if (!node || !node->analyzed || node->global.inlined_to)
	return false;
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->finalized)
	return false;
    }
  return true;
}
/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
   it into a form acceptable for is_gimple_min_invariant.  */
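/* Illustrative example (hypothetical initializer, not from the original
   sources): for

     static int a[4];
     static int *p = &a[0] + 1;

   the initializer reaches us as a POINTER_PLUS_EXPR such as
   (&a[0] p+ 4) on a target with 4-byte int; it is rewritten into the
   invariant address &a[1], which is_gimple_min_invariant accepts.  */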
static tree
canonicalize_constructor_val (tree cval)
{
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR)
    {
      tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval),
					     TREE_OPERAND (cval, 0),
					     TREE_OPERAND (cval, 1),
					     TREE_TYPE (cval));
      if (t)
	cval = t;
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (cval, 0));

      if (base
	  && (TREE_CODE (base) == VAR_DECL
	      || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base))
	return NULL_TREE;
      if (base && TREE_CODE (base) == VAR_DECL)
	add_referenced_var (base);
      /* We never have the chance to fixup types in global initializers
	 during gimplification.  Do so here.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
    }
  return cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */
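/* Illustrative example (hypothetical declaration): for

     static const int answer = 42;

   a use of 'answer' folds to the INTEGER_CST 42, and a 'const' integral
   variable without an initializer that cannot be overridden at link or
   run time folds to zero.  */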
tree
get_symbol_constant_value (tree sym)
{
  if (const_value_known_p (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  val = canonicalize_constructor_val (val);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */
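/* Illustrative example (hypothetical GIMPLE): if ADDR is &a and DEREF is
   the dereference MEM[p_1], propagating &a into p_1 and cancelling the
   address-dereference pair yields a plain read of 'a', provided the type
   checks below succeed.  */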
bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (TREE_CODE (deref) == MEM_REF
	      && TREE_CODE (addr) == ADDR_EXPR);

  /* Don't propagate if ADDR's operand has incomplete type.  */
  if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
    return false;

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
	  || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
				      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
				     TREE_TYPE (addr))
	  && useless_type_conversion_p (TREE_TYPE (deref),
					TREE_TYPE (TREE_OPERAND (addr, 0))));
}
/* A subroutine of fold_stmt.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.

   LOC is the location of the original expression.  */
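/* Illustrative example (hypothetical source): for

     int a[10];

   the access *(int *)((char *)&a + 8) has BASE the array 'a' and
   OFFSET 8; dividing the byte offset by the element size yields the
   ARRAY_REF a[2].  An offset such as 6 that is not an exact multiple of
   the element size is left alone.  */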
static tree
maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of the element type) from that
     ARRAY_REF.  We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = ssizetype;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (!elt_size)
    return NULL_TREE;
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
	 to sign-extend the (possibly pointer) offset here
	 and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
				 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
				   soffset.low, soffset.high,
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
	min_idx = TYPE_MIN_VALUE (idx_type);
      else
	min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds.  For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.
     This is only an issue for multi-dimensional arrays.  */
  if (TREE_CODE (elt_type) == ARRAY_TYPE
      && domain_type)
    {
      if (TYPE_MAX_VALUE (domain_type)
	  && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
	  && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
	return NULL_TREE;
      else if (TYPE_MIN_VALUE (domain_type)
	       && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
	       && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
	return NULL_TREE;
      else if (compare_tree_int (idx, 0) < 0)
	return NULL_TREE;
    }

  {
    tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
    SET_EXPR_LOCATION (t, loc);
    return t;
  }
}
/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
   LOC is the location of the original expression.

   Before attempting the conversion strip off existing ADDR_EXPRs.  */
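/* Illustrative example (hypothetical GIMPLE): with BASE = &a for

     int a[8];

   and OFFSET 4, the expression folds to the ARRAY_REF a[1] when
   ORIG_TYPE is int; with OFFSET 0 and compatible types, the stripped
   BASE itself is returned.  */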
static tree
maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
				tree orig_type)
{
  tree ret;

  STRIP_NOPS (base);
  if (TREE_CODE (base) != ADDR_EXPR)
    return NULL_TREE;

  base = TREE_OPERAND (base, 0);
  if (types_compatible_p (orig_type, TREE_TYPE (base))
      && integer_zerop (offset))
    return base;

  ret = maybe_fold_offset_to_array_ref (loc, base, offset);
  if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
    return ret;
  return NULL_TREE;
}
/* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
   LOC is the location of the original expression.  */
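/* Illustrative example (hypothetical GIMPLE): with ADDR = &a for

     int a[8];

   the conversion (int *)(&a p+ 8) is rewritten as &a[2], the address of
   an array element rather than raw pointer arithmetic.  */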
tree
maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
			      tree orig_type)
{
  tree base, ret;

  STRIP_NOPS (addr);
  if (TREE_CODE (addr) != ADDR_EXPR)
    return NULL_TREE;
  base = TREE_OPERAND (addr, 0);
  ret = maybe_fold_offset_to_array_ref (loc, base, offset);
  if (ret)
    {
      ret = build_fold_addr_expr (ret);
      if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
	return NULL_TREE;
      SET_EXPR_LOCATION (ret, loc);
    }

  return ret;
}
/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */
static tree
maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    {
      /* Or op0 should now be A[0] and the non-constant offset defined
	 via a multiplication by the array element size.  */
      if (TREE_CODE (op0) == ARRAY_REF
	  /* As we will end up creating a variable index array access
	     in the outermost array dimension make sure there isn't
	     a more inner array that the index could overflow to.  */
	  && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
	  && integer_zerop (TREE_OPERAND (op0, 1))
	  && TREE_CODE (op1) == SSA_NAME)
	{
	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
	  tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
	  if (!host_integerp (elsz, 1)
	      || !is_gimple_assign (offset_def))
	    return NULL_TREE;

	  /* Do not build array references of something that we can't
	     see the true number of array dimensions for.  */
	  if (!DECL_P (TREE_OPERAND (op0, 0))
	      && !handled_component_p (TREE_OPERAND (op0, 0)))
	    return NULL_TREE;

	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (op0),
				   TREE_OPERAND (op0, 0),
				   gimple_assign_rhs1 (offset_def),
				   TREE_OPERAND (op0, 2),
				   TREE_OPERAND (op0, 3)));
	  else if (integer_onep (elsz)
		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (op0),
				   TREE_OPERAND (op0, 0),
				   op1,
				   TREE_OPERAND (op0, 2),
				   TREE_OPERAND (op0, 3)));
	}
      else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
	       /* Ditto.  */
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
	       && TREE_CODE (op1) == SSA_NAME)
	{
	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
	  tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
	  if (!host_integerp (elsz, 1)
	      || !is_gimple_assign (offset_def))
	    return NULL_TREE;

	  /* Do not build array references of something that we can't
	     see the true number of array dimensions for.  */
	  if (!DECL_P (op0)
	      && !handled_component_p (op0))
	    return NULL_TREE;

	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
				   op0, gimple_assign_rhs1 (offset_def),
				   integer_zero_node, NULL_TREE));
	  else if (integer_onep (elsz)
		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
				   op0, op1,
				   integer_zero_node, NULL_TREE));
	}

      return NULL_TREE;
    }

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx, 0);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
			     array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (loc, op0, op1);
  if (t)
    {
      t = build_fold_addr_expr (t);
      if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
	return NULL_TREE;
      SET_EXPR_LOCATION (t, loc);
    }

  return t;
}
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */
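/* Illustrative example (hypothetical GIMPLE): after constant propagation
   a load may appear as MEM[(int *)&a, 0]; the code below folds it back
   to the plain reference 'a', and a read from a constant aggregate is
   replaced by the member's known constant value.  */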
static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* ???  We might want to open-code the relevant remaining cases
     to avoid using the generic fold.  */
  if (handled_component_p (*t)
      && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
    {
      tree tem = fold (*t);
      if (tem != *t)
	return tem;
    }

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  /* Canonicalize MEM_REFs invariant address operand.  */
  else if (TREE_CODE (*t) == MEM_REF
	   && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }
  else if (!is_lhs
	   && DECL_P (*t))
    {
      tree tem = get_symbol_constant_value (*t);
      if (tem
	  && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
	{
	  *t = unshare_expr (tem);
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}
/* Attempt to fold an assignment statement pointed to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */
static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Try to fold a conditional expression.  */
	if (TREE_CODE (rhs) == COND_EXPR)
	  {
	    tree op0 = COND_EXPR_COND (rhs);
	    tree tem;
	    bool set = false;
	    location_t cond_loc = EXPR_LOCATION (rhs);

	    if (COMPARISON_CLASS_P (op0))
	      {
		fold_defer_overflow_warnings ();
		tem = fold_binary_loc (cond_loc,
				       TREE_CODE (op0), TREE_TYPE (op0),
				       TREE_OPERAND (op0, 0),
				       TREE_OPERAND (op0, 1));
		/* This is actually a conditional expression, not a GIMPLE
		   conditional statement, however, the valid_gimple_rhs_p
		   test still applies.  */
		set = (tem && is_gimple_condexpr (tem)
		       && valid_gimple_rhs_p (tem));
		fold_undefer_overflow_warnings (set, stmt, 0);
	      }
	    else if (is_gimple_min_invariant (op0))
	      {
		tem = op0;
		set = true;
	      }
	    else
	      return NULL_TREE;

	    if (set)
	      result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
					COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
	  }

	else if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return unshare_expr (get_symbol_constant_value (rhs));

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
	else if (CONVERT_EXPR_CODE_P (subcode)
		 && POINTER_TYPE_P (gimple_expr_type (stmt))
		 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
	  {
	    tree type = gimple_expr_type (stmt);
	    tree t = maybe_fold_offset_to_address (loc,
						   gimple_assign_rhs1 (stmt),
						   integer_zero_node, type);
	    if (t)
	      return t;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	{
	  tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
	    {
	      type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
	      if (!useless_type_conversion_p
		    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
		type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	    }
	  result = maybe_fold_stmt_addition (gimple_location (stmt),
					     type,
					     gimple_assign_rhs1 (stmt),
					     gimple_assign_rhs2 (stmt));
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;

	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
	     we lose canonicalization opportunities.  Do not go again
	     through fold here though, or the same non-GIMPLE will be
	     produced.  */
	  if (commutative_tree_code (subcode)
	      && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				       gimple_assign_rhs2 (stmt), false))
	    return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
			   gimple_assign_rhs2 (stmt),
			   gimple_assign_rhs1 (stmt));
	}
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;

	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
	     we lose canonicalization opportunities.  Do not go again
	     through fold here though, or the same non-GIMPLE will be
	     produced.  */
	  if (commutative_ternary_tree_code (subcode)
	      && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				       gimple_assign_rhs2 (stmt), false))
	    return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
			   gimple_assign_rhs2 (stmt),
			   gimple_assign_rhs1 (stmt),
			   gimple_assign_rhs3 (stmt));
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */
static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */
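/* Illustrative example (hypothetical fold): when a call

     memcpy (&dst, &src, 8);

   is replaced by the assignment dst = src, the gimplified replacement is
   inserted before the call; the first load reuses the call's VUSE and
   the call's VDEF is moved to the final store, keeping the virtual
   operand chain intact.  */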
void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE; /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;
  gimple last = NULL;
  gimple laststore = NULL;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);
  reaching_vuse = gimple_vuse (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_remove (si_p, true);
	  return;
	}
    }
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      if (last)
	{
	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
	  gsi_next (si_p);
	}
      new_stmt = gsi_stmt (i);
      if (gimple_in_ssa_p (cfun))
	{
	  find_new_referenced_vars (new_stmt);
	  mark_symbols_for_renaming (new_stmt);
	}
      /* If the new statement has a VUSE, update it with exact SSA name we
	 know will reach this one.  */
      if (gimple_vuse (new_stmt))
	{
	  /* If we've also seen a previous store create a new VDEF for
	     the latter one, and make that the new reaching VUSE.  */
	  if (laststore)
	    {
	      reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
	      gimple_set_vdef (laststore, reaching_vuse);
	      update_stmt (laststore);
	      laststore = NULL;
	    }
	  gimple_set_vuse (new_stmt, reaching_vuse);
	  gimple_set_modified (new_stmt, true);
	}
      if (gimple_assign_single_p (new_stmt)
	  && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	{
	  laststore = new_stmt;
	}
      last = new_stmt;
    }

  if (lhs == NULL_TREE)
    {
      /* If we replace a call without LHS that has a VDEF and our new
	 sequence ends with a store we must make that store have the same
	 vdef in order not to break the sequencing.  This can happen
	 for instance when folding memcpy calls into assignments.  */
      if (gimple_vdef (stmt) && laststore)
	{
	  gimple_set_vdef (laststore, gimple_vdef (stmt));
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
	  update_stmt (laststore);
	}
      else if (gimple_in_ssa_p (cfun))
	{
	  unlink_stmt_vdef (stmt);
	  release_defs (stmt);
	}

      new_stmt = last;
    }
  else
    {
      if (last)
	{
	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
	  gsi_next (si_p);
	}
      if (laststore && is_gimple_reg (lhs))
	{
	  gimple_set_vdef (laststore, gimple_vdef (stmt));
	  update_stmt (laststore);
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
	  laststore = NULL;
	}
      else if (laststore)
	{
	  reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
	  gimple_set_vdef (laststore, reaching_vuse);
	  update_stmt (laststore);
	  laststore = NULL;
	}

      new_stmt = gimple_build_assign (lhs, tmp);
      if (!is_gimple_reg (tmp))
	gimple_set_vuse (new_stmt, reaching_vuse);
      if (!is_gimple_reg (lhs))
	{
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
	}
      else if (reaching_vuse == gimple_vuse (stmt))
	unlink_stmt_vdef (stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
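/* Illustrative example (hypothetical SSA): for

     p_1 = PHI <"foo", "quux">

   a query with TYPE == 0 fails because the lengths 3 and 4 differ, while
   TYPE == 1 succeeds and stores the maximum length 4 in *LENGTH.  */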
static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
	return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
	       && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	       && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_maxval_strlen (rhs, length, visited, type);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }
	}
	return true;

      default:
	return false;
    }
}
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */
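/* Illustrative examples (hypothetical calls): strlen (p_1) where the CCP
   dataflow proved p_1 = "hello" folds to the constant 5; strcpy (d, s)
   with a known string length for 's' may be turned into a call to memcpy
   with an explicit length argument.  */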
tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	      fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  If a call to the function must be accompanied
   by a this adjustment, the constant which should be added to the this
   pointer is stored in *DELTA.  If REFUSE_THUNKS is true, return NULL if the
   function is a thunk (other than a this adjustment which is dealt with by
   DELTA).  */
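/* Illustrative example (hypothetical C++): a virtual call

     obj->f ();   // an OBJ_TYPE_REF with some token N

   whose object's dynamic type is known looks up the N-th entry in
   BINFO_VIRTUALS of KNOWN_BINFO; the TREE_VALUE of that entry is the
   concrete method the call can be devirtualized to, and TREE_PURPOSE
   holds the associated this adjustment.  */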
tree
gimple_get_virt_mehtod_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  tree *delta, bool refuse_thunks)
{
  HOST_WIDE_INT i;
  tree v, fndecl;
  struct cgraph_node *node;

  v = BINFO_VIRTUALS (known_binfo);
  /* If there are no virtual methods leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;
  i = 0;
  while (i != token)
    {
      i += (TARGET_VTABLE_USES_DESCRIPTORS
	    ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
      v = TREE_CHAIN (v);
    }

  fndecl = TREE_VALUE (v);
  node = cgraph_get_node_or_alias (fndecl);
  if (refuse_thunks
      && (!node
	  /* Bail out if it is a thunk declaration.  Since simple this_adjusting
	     thunks are represented by a constant in TREE_PURPOSE of items in
	     BINFO_VIRTUALS, this is a more complicated type which we cannot
	     handle as well as OBJ_TYPE_REF dispatch.

	     FIXME: Remove the following condition once we are able to represent
	     thunk information on call graph edges.  */
	  || (node->same_body_alias && node->thunk.thunk_p)))
    return NULL_TREE;

  /* When the cgraph node is missing and the function is not public, we cannot
     devirtualize.  This can happen in WHOPR when the actual method
     ends up in another partition, because we found the devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (TREE_VALUE (v)))
    return NULL_TREE;

  *delta = TREE_PURPOSE (v);
  gcc_checking_assert (host_integerp (*delta, 0));
  return fndecl;
}
/* Generate code adjusting the first parameter of a call statement determined
   by GSI by DELTA.  */

void
gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
{
  gimple call_stmt = gsi_stmt (*gsi);
  tree parm, tmp;
  gimple new_stmt;

  delta = fold_convert (sizetype, delta);
  gcc_assert (gimple_call_num_args (call_stmt) >= 1);
  parm = gimple_call_arg (call_stmt, 0);
  gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
  tmp = create_tmp_var (TREE_TYPE (parm), NULL);
  add_referenced_var (tmp);

  tmp = make_ssa_name (tmp, NULL);
  new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
  SSA_NAME_DEF_STMT (tmp) = new_stmt;
  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
  gimple_call_set_arg (call_stmt, 0, tmp);
}
/* Fold a call statement to OBJ_TYPE_REF to a direct call, if possible.
   GSI determines the statement.  Return true iff the statement was
   changed.  */
static bool
gimple_fold_obj_type_ref_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree ref = gimple_call_fn (stmt);
  tree obj = OBJ_TYPE_REF_OBJECT (ref);
  tree binfo, fndecl, delta;
  HOST_WIDE_INT token;

  if (TREE_CODE (obj) != ADDR_EXPR)
    return false;
  obj = TREE_OPERAND (obj, 0);
  if (!DECL_P (obj)
      || TREE_CODE (TREE_TYPE (obj)) != RECORD_TYPE)
    return false;
  binfo = TYPE_BINFO (TREE_TYPE (obj));
  if (!binfo)
    return false;

  token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
  fndecl = gimple_get_virt_mehtod_for_binfo (token, binfo, &delta, false);
  if (!fndecl)
    return false;
  gcc_assert (integer_zerop (delta));
  gimple_call_set_fndecl (stmt, fndecl);
  return true;
}
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */
static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (!inplace && callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);

      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  return true;
	}
    }
  else
    {
      /* ??? Should perhaps do this in fold proper.  However, doing it
	 there requires that we create a new CALL_EXPR, and that requires
	 copying EH region info to the new node.  Easier to just do it
	 here where we can just smash the call operand.  */
      callee = gimple_call_fn (stmt);
      if (TREE_CODE (callee) == OBJ_TYPE_REF)
	return gimple_fold_obj_type_ref_call (gsi);
    }

  return false;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */
static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	tree lhs = gimple_assign_lhs (stmt);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      /* Fold *& in call arguments.  */
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
	  {
	    tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	    if (tmp)
	      {
		gimple_call_set_arg (stmt, i, tmp);
		changed = true;
	      }
	  }
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	{
	  tree link = gimple_asm_output_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, true)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	{
	  tree link = gimple_asm_input_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, false)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}
/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement STMT should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  bool changed = fold_stmt_1 (&gsi, true);
  gcc_assert (gsi_stmt (gsi) == stmt);
  return changed;
}
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */
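/* Illustrative example (hypothetical input): with INVERT true, an SSA
   name x_1 becomes the boolean expression x_1 == 0 and the comparison
   a_1 < b_2 becomes a_1 >= b_2; with INVERT false, x_1 becomes
   x_1 != 0.  */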
static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */
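/* Illustrative example (hypothetical SSA): given

     t_1 = a_2 < b_3;

   the expression t_1 is recognized as equivalent to the comparison
   (a_2 < b_3), and likewise (t_1 != 0) recurses to the same
   comparison.  */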
static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (innercode == TRUTH_AND_EXPR
      || innercode == TRUTH_OR_EXPR
      || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
	  && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */
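/* Illustrative examples (hypothetical comparisons): (x < 5) AND (x < 3)
   simplifies to the more restrictive x < 3; (x <= 4) AND (x >= 4)
   collapses to the singleton test x == 4; (x < 2) AND (x > 7) folds to
   boolean_false_node because the ranges are disjoint.  */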
2019 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
2020 enum tree_code code2
, tree op2a
, tree op2b
)
2022 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
2023 if (operand_equal_p (op1a
, op2a
, 0)
2024 && operand_equal_p (op1b
, op2b
, 0))
2026 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2027 TRUTH_ANDIF_EXPR
, code1
, code2
,
2028 boolean_type_node
, op1a
, op1b
);
2033 /* Likewise the swapped case of the above. */
2034 if (operand_equal_p (op1a
, op2b
, 0)
2035 && operand_equal_p (op1b
, op2a
, 0))
2037 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2038 TRUTH_ANDIF_EXPR
, code1
,
2039 swap_tree_comparison (code2
),
2040 boolean_type_node
, op1a
, op1b
);
2045 /* If both comparisons are of the same value against constants, we might
2046 be able to merge them. */
2047 if (operand_equal_p (op1a
, op2a
, 0)
2048 && TREE_CODE (op1b
) == INTEGER_CST
2049 && TREE_CODE (op2b
) == INTEGER_CST
)
2051 int cmp
= tree_int_cst_compare (op1b
, op2b
);
2053 /* If we have (op1a == op1b), we should either be able to
2054 return that or FALSE, depending on whether the constant op1b
2055 also satisfies the other comparison against op2b. */
2056 if (code1
== EQ_EXPR
)
2062 case EQ_EXPR
: val
= (cmp
== 0); break;
2063 case NE_EXPR
: val
= (cmp
!= 0); break;
2064 case LT_EXPR
: val
= (cmp
< 0); break;
2065 case GT_EXPR
: val
= (cmp
> 0); break;
2066 case LE_EXPR
: val
= (cmp
<= 0); break;
2067 case GE_EXPR
: val
= (cmp
>= 0); break;
2068 default: done
= false;
2073 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2075 return boolean_false_node
;
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
              else
                return boolean_false_node;
            }
        }
      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp != 0); break;
            case NE_EXPR: val = (cmp == 0); break;
            case LT_EXPR: val = (cmp >= 0); break;
            case GT_EXPR: val = (cmp <= 0); break;
            case LE_EXPR: val = (cmp > 0); break;
            case GE_EXPR: val = (cmp < 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
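      /* For example, (x != 7) AND (x < 5) folds to (x < 5), because
         every value satisfying x < 5 already satisfies x != 7.  */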
      else if (code2 == NE_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp <= 0); break;
            case GT_EXPR: val = (cmp >= 0); break;
            case LE_EXPR: val = (cmp < 0); break;
            case GE_EXPR: val = (cmp > 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }
      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
          else
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
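      /* For example, (x < 3) AND (x <= 5) keeps (x < 3), while
         (x < 5) AND (x <= 3) keeps (x <= 3).  */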
      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
          else
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      /* Check for singleton ranges.  */
      else if (cmp == 0
               && ((code1 == LE_EXPR && code2 == GE_EXPR)
                   || (code1 == GE_EXPR && code2 == LE_EXPR)))
        return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_false_node;
      else if (cmp >= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_false_node;
    }
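  /* For example, (x <= 4) AND (x >= 4) becomes (x == 4), and
     (x < 2) AND (x > 5) becomes FALSE outright.  */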
  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ANDed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     return that.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
                        {
                          if (!result)
                            result = boolean_false_node;
                          else if (!integer_zerop (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME)
                    {
                      tree temp = and_var_with_comparison (arg, invert,
                                                           code2, op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }
          break;

        default:
          break;
        }
    }
  return NULL_TREE;
}

/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */
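/* For example, (x > 0) AND (x == 7) simplifies to (x == 7).  Note that
   and_comparisons_1 only looks through SSA definitions on its first
   comparison, so if the first attempt fails we retry with the two
   comparisons swapped.  */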
tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
                            enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}

/* Helper function for or_comparisons_1: try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
                        enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
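  /* For example, with code2 == LT_EXPR the inverted test is GE_EXPR:
     !var OR (a < b) is computed as !(var AND (a >= b)), and
     canonicalize_bool below re-inverts the folded result.  */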
  if (invert)
    t = and_var_with_comparison_1 (stmt,
                                   invert_tree_comparison (code2, false),
                                   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}

/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
                          enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_true_node;
        }
    }
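  /* Thus var OR (var != 0) yields var itself, while var OR (var == 0)
     covers both possibilities and yields true.  */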

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 code2, op2a, op2b);
      if (t)
        return t;
    }
  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (innercode == TRUTH_AND_EXPR
      || innercode == TRUTH_OR_EXPR
      || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
          && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
         inner1 OR (inner1 AND inner2) => inner1
         !inner1 OR (inner1 OR inner2) => true
         !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
        return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_or
                ? boolean_true_node
                : or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_or
                ? boolean_true_node
                : or_var_with_comparison (inner1, false, code2, op2a, op2b));
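      /* For instance, if var is (b AND c) and the second test is (b == 0),
         the result is !b OR c, obtained by recursing on c.  */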

      /* Next, redistribute/reassociate the OR across the inner tests.
         Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
        {
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             => (t OR inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_or)
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (integer_zerop (t))
                return inner2;
            }

          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner2 OR (op2a code op2b)))  */
          else if (integer_zerop (t))
            return boolean_false_node;

          /* Save partial result for later.  */
          partial = t;
        }
2404 /* Save partial result for later. */
2408 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2409 if (TREE_CODE (inner2
) == SSA_NAME
2410 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
2411 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2412 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
2413 gimple_assign_rhs1 (s
),
2414 gimple_assign_rhs2 (s
),
2415 code2
, op2a
, op2b
)))
2417 /* Handle the OR case, where we are reassociating:
2418 (inner1 OR inner2) OR (op2a code2 op2b)
2420 => (t OR partial) */
2423 if (integer_zerop (t
))
2425 else if (integer_onep (t
))
2426 return boolean_true_node
;
2427 /* If both are the same, we can apply the identity
2429 else if (partial
&& same_bool_result_p (t
, partial
))
2433 /* Handle the AND case, where we are redistributing:
2434 (inner1 AND inner2) OR (op2a code2 op2b)
2435 => (t AND (inner1 OR (op2a code2 op2b)))
2436 => (t AND partial) */
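          /* For example, if var is ((x < 5) AND (x < 3)) and the second
             test is (x < 7), both partial results fold to (x < 7), and
             the identity check below returns that single comparison.  */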
          else
            {
              if (integer_zerop (t))
                return boolean_false_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed AND expression.  The
                     interesting case is when at least one is true.
                     Or, if both are the same, we can apply the identity
                     (x AND x) == x.  */
                  if (integer_onep (partial))
                    return t;
                  else if (integer_onep (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}

/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1, code2,
                                    boolean_type_node, op1a, op1b);
      if (t)
        return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1,
                                    swap_tree_comparison (code2),
                                    boolean_type_node, op1a, op1b);
      if (t)
        return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);
      /* If we have (op1a != op1b), we should either be able to
         return that or TRUE, depending on whether the constant op1b
         also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code1, boolean_type_node, op1a, op1b);
            }
        }
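      /* For example, (x != 5) OR (x < 7) is always TRUE because the
         constant 5 satisfies x < 7, while (x != 5) OR (x < 3) folds
         to just (x != 5).  */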
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
            }
        }
      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      else if (code2 == EQ_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }
      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }
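      /* For example, (x < 5) OR (x <= 7) keeps the weaker test (x <= 7).  */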
      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }
      /* Check for singleton ranges.  */
      else if (cmp == 0
               && ((code1 == LT_EXPR && code2 == GT_EXPR)
                   || (code1 == GT_EXPR && code2 == LT_EXPR)))
        return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_true_node;
      else if (cmp <= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_true_node;
    }
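  /* For example, (x < 4) OR (x > 4) becomes (x != 4), and
     (x <= 5) OR (x >= 3) covers every value and becomes TRUE.  */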
  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ORed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     return that.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
                        {
                          if (!result)
                            result = boolean_true_node;
                          else if (!integer_onep (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME)
                    {
                      tree temp = or_var_with_comparison (arg, invert,
                                                          code2, op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }
          break;

        default:
          break;
        }
    }
  return NULL_TREE;
}

/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */
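/* For example, (x > 0) OR (x == 0) simplifies to (x >= 0).  As with the
   AND variant, or_comparisons_1 only looks through SSA definitions on
   its first comparison, so a failed first attempt is retried with the
   comparisons swapped.  */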
tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}