/* Statement simplification on GIMPLE.
   Copyright (C) 2010 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
34 /* Return true when DECL can be referenced from current unit.
35 We can get declarations that are not possible to reference for
38 1) When analyzing C++ virtual tables.
39 C++ virtual tables do have known constructors even
40 when they are keyed to other compilation unit.
41 Those tables can contain pointers to methods and vars
42 in other units. Those methods have both STATIC and EXTERNAL
44 2) In WHOPR mode devirtualization might lead to reference
45 to method that was partitioned elsehwere.
46 In this case we have static VAR_DECL or FUNCTION_DECL
47 that has no corresponding callgraph/varpool node
49 3) COMDAT functions referred by external vtables that
50 we devirtualize only during final copmilation stage.
51 At this time we already decided that we will not output
52 the function body and thus we can't reference the symbol
56 can_refer_decl_in_current_unit_p (tree decl
)
58 struct varpool_node
*vnode
;
59 struct cgraph_node
*node
;
61 if (!TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
))
63 /* External flag is set, so we deal with C++ reference
64 to static object from other file. */
65 if (DECL_EXTERNAL (decl
) && TREE_STATIC (decl
)
66 && TREE_CODE (decl
) == VAR_DECL
)
68 /* Just be sure it is not big in frontend setting
69 flags incorrectly. Those variables should never
71 gcc_checking_assert (!(vnode
= varpool_get_node (decl
))
72 || !vnode
->finalized
);
75 /* When function is public, we always can introduce new reference.
76 Exception are the COMDAT functions where introducing a direct
77 reference imply need to include function body in the curren tunit. */
78 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
80 /* We are not at ltrans stage; so don't worry about WHOPR.
81 Also when still gimplifying all referred comdat functions will be
83 if (!flag_ltrans
&& (!DECL_COMDAT (decl
) || !cgraph_function_flags_ready
))
85 /* If we already output the function body, we are safe. */
86 if (TREE_ASM_WRITTEN (decl
))
88 if (TREE_CODE (decl
) == FUNCTION_DECL
)
90 node
= cgraph_get_node (decl
);
91 /* Check that we still have function body and that we didn't took
92 the decision to eliminate offline copy of the function yet.
93 The second is important when devirtualization happens during final
94 compilation stage when making a new reference no longer makes callee
96 if (!node
|| !node
->analyzed
|| node
->global
.inlined_to
)
99 else if (TREE_CODE (decl
) == VAR_DECL
)
101 vnode
= varpool_get_node (decl
);
102 if (!vnode
|| !vnode
->finalized
)
108 /* CVAL is value taken from DECL_INITIAL of variable. Try to transorm it into
109 acceptable form for is_gimple_min_invariant. */
112 canonicalize_constructor_val (tree cval
)
115 if (TREE_CODE (cval
) == POINTER_PLUS_EXPR
)
117 tree t
= maybe_fold_offset_to_address (EXPR_LOCATION (cval
),
118 TREE_OPERAND (cval
, 0),
119 TREE_OPERAND (cval
, 1),
124 if (TREE_CODE (cval
) == ADDR_EXPR
)
126 tree base
= get_base_address (TREE_OPERAND (cval
, 0));
129 && (TREE_CODE (base
) == VAR_DECL
130 || TREE_CODE (base
) == FUNCTION_DECL
)
131 && !can_refer_decl_in_current_unit_p (base
))
133 if (base
&& TREE_CODE (base
) == VAR_DECL
)
134 add_referenced_var (base
);
135 /* We never have the chance to fixup types in global initializers
136 during gimplification. Do so here. */
137 if (TREE_TYPE (TREE_TYPE (cval
)) != TREE_TYPE (TREE_OPERAND (cval
, 0)))
138 cval
= build_fold_addr_expr (TREE_OPERAND (cval
, 0));
143 /* If SYM is a constant variable with known value, return the value.
144 NULL_TREE is returned otherwise. */
147 get_symbol_constant_value (tree sym
)
149 if (const_value_known_p (sym
))
151 tree val
= DECL_INITIAL (sym
);
154 val
= canonicalize_constructor_val (val
);
155 if (val
&& is_gimple_min_invariant (val
))
160 /* Variables declared 'const' without an initializer
161 have zero as the initializer if they may not be
162 overridden at link or run time. */
164 && (INTEGRAL_TYPE_P (TREE_TYPE (sym
))
165 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym
))))
166 return build_zero_cst (TREE_TYPE (sym
));
173 /* Return true if we may propagate the address expression ADDR into the
174 dereference DEREF and cancel them. */
177 may_propagate_address_into_dereference (tree addr
, tree deref
)
179 gcc_assert (TREE_CODE (deref
) == MEM_REF
180 && TREE_CODE (addr
) == ADDR_EXPR
);
182 /* Don't propagate if ADDR's operand has incomplete type. */
183 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr
, 0))))
186 /* If the address is invariant then we do not need to preserve restrict
187 qualifications. But we do need to preserve volatile qualifiers until
188 we can annotate the folded dereference itself properly. */
189 if (is_gimple_min_invariant (addr
)
190 && (!TREE_THIS_VOLATILE (deref
)
191 || TYPE_VOLATILE (TREE_TYPE (addr
))))
192 return useless_type_conversion_p (TREE_TYPE (deref
),
193 TREE_TYPE (TREE_OPERAND (addr
, 0)));
195 /* Else both the address substitution and the folding must result in
196 a valid useless type conversion sequence. */
197 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref
, 0)),
199 && useless_type_conversion_p (TREE_TYPE (deref
),
200 TREE_TYPE (TREE_OPERAND (addr
, 0))));
204 /* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
205 BASE is an array type. OFFSET is a byte displacement.
207 LOC is the location of the original expression. */
210 maybe_fold_offset_to_array_ref (location_t loc
, tree base
, tree offset
)
212 tree min_idx
, idx
, idx_type
, elt_offset
= integer_zero_node
;
213 tree array_type
, elt_type
, elt_size
;
216 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
217 measured in units of the size of elements type) from that ARRAY_REF).
218 We can't do anything if either is variable.
220 The case we handle here is *(&A[N]+O). */
221 if (TREE_CODE (base
) == ARRAY_REF
)
223 tree low_bound
= array_ref_low_bound (base
);
225 elt_offset
= TREE_OPERAND (base
, 1);
226 if (TREE_CODE (low_bound
) != INTEGER_CST
227 || TREE_CODE (elt_offset
) != INTEGER_CST
)
230 elt_offset
= int_const_binop (MINUS_EXPR
, elt_offset
, low_bound
, 0);
231 base
= TREE_OPERAND (base
, 0);
234 /* Ignore stupid user tricks of indexing non-array variables. */
235 array_type
= TREE_TYPE (base
);
236 if (TREE_CODE (array_type
) != ARRAY_TYPE
)
238 elt_type
= TREE_TYPE (array_type
);
240 /* Use signed size type for intermediate computation on the index. */
241 idx_type
= ssizetype
;
243 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
244 element type (so we can use the alignment if it's not constant).
245 Otherwise, compute the offset as an index by using a division. If the
246 division isn't exact, then don't do anything. */
247 elt_size
= TYPE_SIZE_UNIT (elt_type
);
250 if (integer_zerop (offset
))
252 if (TREE_CODE (elt_size
) != INTEGER_CST
)
253 elt_size
= size_int (TYPE_ALIGN (elt_type
));
255 idx
= build_int_cst (idx_type
, 0);
259 unsigned HOST_WIDE_INT lquo
, lrem
;
260 HOST_WIDE_INT hquo
, hrem
;
263 /* The final array offset should be signed, so we need
264 to sign-extend the (possibly pointer) offset here
265 and use signed division. */
266 soffset
= double_int_sext (tree_to_double_int (offset
),
267 TYPE_PRECISION (TREE_TYPE (offset
)));
268 if (TREE_CODE (elt_size
) != INTEGER_CST
269 || div_and_round_double (TRUNC_DIV_EXPR
, 0,
270 soffset
.low
, soffset
.high
,
271 TREE_INT_CST_LOW (elt_size
),
272 TREE_INT_CST_HIGH (elt_size
),
273 &lquo
, &hquo
, &lrem
, &hrem
)
277 idx
= build_int_cst_wide (idx_type
, lquo
, hquo
);
280 /* Assume the low bound is zero. If there is a domain type, get the
281 low bound, if any, convert the index into that type, and add the
283 min_idx
= build_int_cst (idx_type
, 0);
284 domain_type
= TYPE_DOMAIN (array_type
);
287 idx_type
= domain_type
;
288 if (TYPE_MIN_VALUE (idx_type
))
289 min_idx
= TYPE_MIN_VALUE (idx_type
);
291 min_idx
= fold_convert (idx_type
, min_idx
);
293 if (TREE_CODE (min_idx
) != INTEGER_CST
)
296 elt_offset
= fold_convert (idx_type
, elt_offset
);
299 if (!integer_zerop (min_idx
))
300 idx
= int_const_binop (PLUS_EXPR
, idx
, min_idx
, 0);
301 if (!integer_zerop (elt_offset
))
302 idx
= int_const_binop (PLUS_EXPR
, idx
, elt_offset
, 0);
304 /* Make sure to possibly truncate late after offsetting. */
305 idx
= fold_convert (idx_type
, idx
);
307 /* We don't want to construct access past array bounds. For example
310 should not be simplified into (*c)[14] or tree-vrp will
312 This is only an issue for multi-dimensional arrays. */
313 if (TREE_CODE (elt_type
) == ARRAY_TYPE
316 if (TYPE_MAX_VALUE (domain_type
)
317 && TREE_CODE (TYPE_MAX_VALUE (domain_type
)) == INTEGER_CST
318 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type
), idx
))
320 else if (TYPE_MIN_VALUE (domain_type
)
321 && TREE_CODE (TYPE_MIN_VALUE (domain_type
)) == INTEGER_CST
322 && tree_int_cst_lt (idx
, TYPE_MIN_VALUE (domain_type
)))
324 else if (compare_tree_int (idx
, 0) < 0)
329 tree t
= build4 (ARRAY_REF
, elt_type
, base
, idx
, NULL_TREE
, NULL_TREE
);
330 SET_EXPR_LOCATION (t
, loc
);
336 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
337 LOC is the location of original expression.
339 Before attempting the conversion strip off existing ADDR_EXPRs. */
342 maybe_fold_offset_to_reference (location_t loc
, tree base
, tree offset
,
348 if (TREE_CODE (base
) != ADDR_EXPR
)
351 base
= TREE_OPERAND (base
, 0);
352 if (types_compatible_p (orig_type
, TREE_TYPE (base
))
353 && integer_zerop (offset
))
356 ret
= maybe_fold_offset_to_array_ref (loc
, base
, offset
);
357 if (ret
&& types_compatible_p (orig_type
, TREE_TYPE (ret
)))
362 /* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
363 LOC is the location of the original expression. */
366 maybe_fold_offset_to_address (location_t loc
, tree addr
, tree offset
,
372 if (TREE_CODE (addr
) != ADDR_EXPR
)
374 base
= TREE_OPERAND (addr
, 0);
375 ret
= maybe_fold_offset_to_array_ref (loc
, base
, offset
);
378 ret
= build_fold_addr_expr (ret
);
379 if (!useless_type_conversion_p (orig_type
, TREE_TYPE (ret
)))
381 SET_EXPR_LOCATION (ret
, loc
);
388 /* A quaint feature extant in our address arithmetic is that there
389 can be hidden type changes here. The type of the result need
390 not be the same as the type of the input pointer.
392 What we're after here is an expression of the form
393 (T *)(&array + const)
394 where array is OP0, const is OP1, RES_TYPE is T and
395 the cast doesn't actually exist, but is implicit in the
396 type of the POINTER_PLUS_EXPR. We'd like to turn this into
398 which may be able to propagate further. */
401 maybe_fold_stmt_addition (location_t loc
, tree res_type
, tree op0
, tree op1
)
406 /* The first operand should be an ADDR_EXPR. */
407 if (TREE_CODE (op0
) != ADDR_EXPR
)
409 op0
= TREE_OPERAND (op0
, 0);
411 /* It had better be a constant. */
412 if (TREE_CODE (op1
) != INTEGER_CST
)
414 /* Or op0 should now be A[0] and the non-constant offset defined
415 via a multiplication by the array element size. */
416 if (TREE_CODE (op0
) == ARRAY_REF
417 /* As we will end up creating a variable index array access
418 in the outermost array dimension make sure there isn't
419 a more inner array that the index could overflow to. */
420 && TREE_CODE (TREE_OPERAND (op0
, 0)) != ARRAY_REF
421 && integer_zerop (TREE_OPERAND (op0
, 1))
422 && TREE_CODE (op1
) == SSA_NAME
)
424 gimple offset_def
= SSA_NAME_DEF_STMT (op1
);
425 tree elsz
= TYPE_SIZE_UNIT (TREE_TYPE (op0
));
426 if (!host_integerp (elsz
, 1)
427 || !is_gimple_assign (offset_def
))
430 /* Do not build array references of something that we can't
431 see the true number of array dimensions for. */
432 if (!DECL_P (TREE_OPERAND (op0
, 0))
433 && !handled_component_p (TREE_OPERAND (op0
, 0)))
436 if (gimple_assign_rhs_code (offset_def
) == MULT_EXPR
437 && TREE_CODE (gimple_assign_rhs2 (offset_def
)) == INTEGER_CST
438 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def
), elsz
))
439 return build_fold_addr_expr
440 (build4 (ARRAY_REF
, TREE_TYPE (op0
),
441 TREE_OPERAND (op0
, 0),
442 gimple_assign_rhs1 (offset_def
),
443 TREE_OPERAND (op0
, 2),
444 TREE_OPERAND (op0
, 3)));
445 else if (integer_onep (elsz
)
446 && gimple_assign_rhs_code (offset_def
) != MULT_EXPR
)
447 return build_fold_addr_expr
448 (build4 (ARRAY_REF
, TREE_TYPE (op0
),
449 TREE_OPERAND (op0
, 0),
451 TREE_OPERAND (op0
, 2),
452 TREE_OPERAND (op0
, 3)));
454 else if (TREE_CODE (TREE_TYPE (op0
)) == ARRAY_TYPE
456 && TREE_CODE (TREE_TYPE (TREE_TYPE (op0
))) != ARRAY_TYPE
457 && TREE_CODE (op1
) == SSA_NAME
)
459 gimple offset_def
= SSA_NAME_DEF_STMT (op1
);
460 tree elsz
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0
)));
461 if (!host_integerp (elsz
, 1)
462 || !is_gimple_assign (offset_def
))
465 /* Do not build array references of something that we can't
466 see the true number of array dimensions for. */
468 && !handled_component_p (op0
))
471 if (gimple_assign_rhs_code (offset_def
) == MULT_EXPR
472 && TREE_CODE (gimple_assign_rhs2 (offset_def
)) == INTEGER_CST
473 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def
), elsz
))
474 return build_fold_addr_expr
475 (build4 (ARRAY_REF
, TREE_TYPE (TREE_TYPE (op0
)),
476 op0
, gimple_assign_rhs1 (offset_def
),
477 integer_zero_node
, NULL_TREE
));
478 else if (integer_onep (elsz
)
479 && gimple_assign_rhs_code (offset_def
) != MULT_EXPR
)
480 return build_fold_addr_expr
481 (build4 (ARRAY_REF
, TREE_TYPE (TREE_TYPE (op0
)),
483 integer_zero_node
, NULL_TREE
));
489 /* If the first operand is an ARRAY_REF, expand it so that we can fold
490 the offset into it. */
491 while (TREE_CODE (op0
) == ARRAY_REF
)
493 tree array_obj
= TREE_OPERAND (op0
, 0);
494 tree array_idx
= TREE_OPERAND (op0
, 1);
495 tree elt_type
= TREE_TYPE (op0
);
496 tree elt_size
= TYPE_SIZE_UNIT (elt_type
);
499 if (TREE_CODE (array_idx
) != INTEGER_CST
)
501 if (TREE_CODE (elt_size
) != INTEGER_CST
)
504 /* Un-bias the index by the min index of the array type. */
505 min_idx
= TYPE_DOMAIN (TREE_TYPE (array_obj
));
508 min_idx
= TYPE_MIN_VALUE (min_idx
);
511 if (TREE_CODE (min_idx
) != INTEGER_CST
)
514 array_idx
= fold_convert (TREE_TYPE (min_idx
), array_idx
);
515 if (!integer_zerop (min_idx
))
516 array_idx
= int_const_binop (MINUS_EXPR
, array_idx
,
521 /* Convert the index to a byte offset. */
522 array_idx
= fold_convert (sizetype
, array_idx
);
523 array_idx
= int_const_binop (MULT_EXPR
, array_idx
, elt_size
, 0);
525 /* Update the operands for the next round, or for folding. */
526 op1
= int_const_binop (PLUS_EXPR
,
531 ptd_type
= TREE_TYPE (res_type
);
532 /* If we want a pointer to void, reconstruct the reference from the
533 array element type. A pointer to that can be trivially converted
534 to void *. This happens as we fold (void *)(ptr p+ off). */
535 if (VOID_TYPE_P (ptd_type
)
536 && TREE_CODE (TREE_TYPE (op0
)) == ARRAY_TYPE
)
537 ptd_type
= TREE_TYPE (TREE_TYPE (op0
));
539 /* At which point we can try some of the same things as for indirects. */
540 t
= maybe_fold_offset_to_array_ref (loc
, op0
, op1
);
543 t
= build_fold_addr_expr (t
);
544 if (!useless_type_conversion_p (res_type
, TREE_TYPE (t
)))
546 SET_EXPR_LOCATION (t
, loc
);
552 /* Subroutine of fold_stmt. We perform several simplifications of the
553 memory reference tree EXPR and make sure to re-gimplify them properly
554 after propagation of constant addresses. IS_LHS is true if the
555 reference is supposed to be an lvalue. */
558 maybe_fold_reference (tree expr
, bool is_lhs
)
564 && (result
= fold_const_aggregate_ref (expr
))
565 && is_gimple_min_invariant (result
))
568 /* ??? We might want to open-code the relevant remaining cases
569 to avoid using the generic fold. */
570 if (handled_component_p (*t
)
571 && CONSTANT_CLASS_P (TREE_OPERAND (*t
, 0)))
573 tree tem
= fold (*t
);
578 while (handled_component_p (*t
))
579 t
= &TREE_OPERAND (*t
, 0);
581 /* Fold back MEM_REFs to reference trees. */
582 if (TREE_CODE (*t
) == MEM_REF
583 && TREE_CODE (TREE_OPERAND (*t
, 0)) == ADDR_EXPR
584 && integer_zerop (TREE_OPERAND (*t
, 1))
585 && (TREE_THIS_VOLATILE (*t
)
586 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0)))
587 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t
, 1)))
588 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t
))
589 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t
, 1)))))
590 /* We have to look out here to not drop a required conversion
591 from the rhs to the lhs if is_lhs, but we don't have the
592 rhs here to verify that. Thus require strict type
594 && types_compatible_p (TREE_TYPE (*t
),
595 TREE_TYPE (TREE_OPERAND
596 (TREE_OPERAND (*t
, 0), 0))))
599 *t
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
600 tem
= maybe_fold_reference (expr
, is_lhs
);
605 /* Canonicalize MEM_REFs invariant address operand. */
606 else if (TREE_CODE (*t
) == MEM_REF
607 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t
, 0)))
609 bool volatile_p
= TREE_THIS_VOLATILE (*t
);
610 tree tem
= fold_binary (MEM_REF
, TREE_TYPE (*t
),
611 TREE_OPERAND (*t
, 0),
612 TREE_OPERAND (*t
, 1));
615 TREE_THIS_VOLATILE (tem
) = volatile_p
;
617 tem
= maybe_fold_reference (expr
, is_lhs
);
623 else if (TREE_CODE (*t
) == TARGET_MEM_REF
)
625 tree tem
= maybe_fold_tmr (*t
);
629 tem
= maybe_fold_reference (expr
, is_lhs
);
638 tree tem
= get_symbol_constant_value (*t
);
640 && useless_type_conversion_p (TREE_TYPE (*t
), TREE_TYPE (tem
)))
642 *t
= unshare_expr (tem
);
643 tem
= maybe_fold_reference (expr
, is_lhs
);
654 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
655 replacement rhs for the statement or NULL_TREE if no simplification
656 could be made. It is assumed that the operands have been previously
660 fold_gimple_assign (gimple_stmt_iterator
*si
)
662 gimple stmt
= gsi_stmt (*si
);
663 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
664 location_t loc
= gimple_location (stmt
);
666 tree result
= NULL_TREE
;
668 switch (get_gimple_rhs_class (subcode
))
670 case GIMPLE_SINGLE_RHS
:
672 tree rhs
= gimple_assign_rhs1 (stmt
);
674 /* Try to fold a conditional expression. */
675 if (TREE_CODE (rhs
) == COND_EXPR
)
677 tree op0
= COND_EXPR_COND (rhs
);
680 location_t cond_loc
= EXPR_LOCATION (rhs
);
682 if (COMPARISON_CLASS_P (op0
))
684 fold_defer_overflow_warnings ();
685 tem
= fold_binary_loc (cond_loc
,
686 TREE_CODE (op0
), TREE_TYPE (op0
),
687 TREE_OPERAND (op0
, 0),
688 TREE_OPERAND (op0
, 1));
689 /* This is actually a conditional expression, not a GIMPLE
690 conditional statement, however, the valid_gimple_rhs_p
691 test still applies. */
692 set
= (tem
&& is_gimple_condexpr (tem
)
693 && valid_gimple_rhs_p (tem
));
694 fold_undefer_overflow_warnings (set
, stmt
, 0);
696 else if (is_gimple_min_invariant (op0
))
705 result
= fold_build3_loc (cond_loc
, COND_EXPR
, TREE_TYPE (rhs
), tem
,
706 COND_EXPR_THEN (rhs
), COND_EXPR_ELSE (rhs
));
709 else if (REFERENCE_CLASS_P (rhs
))
710 return maybe_fold_reference (rhs
, false);
712 else if (TREE_CODE (rhs
) == ADDR_EXPR
)
714 tree ref
= TREE_OPERAND (rhs
, 0);
715 tree tem
= maybe_fold_reference (ref
, true);
717 && TREE_CODE (tem
) == MEM_REF
718 && integer_zerop (TREE_OPERAND (tem
, 1)))
719 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (tem
, 0));
721 result
= fold_convert (TREE_TYPE (rhs
),
722 build_fold_addr_expr_loc (loc
, tem
));
723 else if (TREE_CODE (ref
) == MEM_REF
724 && integer_zerop (TREE_OPERAND (ref
, 1)))
725 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (ref
, 0));
728 else if (TREE_CODE (rhs
) == CONSTRUCTOR
729 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
730 && (CONSTRUCTOR_NELTS (rhs
)
731 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
733 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
737 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
738 if (TREE_CODE (val
) != INTEGER_CST
739 && TREE_CODE (val
) != REAL_CST
740 && TREE_CODE (val
) != FIXED_CST
)
743 return build_vector_from_ctor (TREE_TYPE (rhs
),
744 CONSTRUCTOR_ELTS (rhs
));
747 else if (DECL_P (rhs
))
748 return unshare_expr (get_symbol_constant_value (rhs
));
750 /* If we couldn't fold the RHS, hand over to the generic
752 if (result
== NULL_TREE
)
755 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
756 that may have been added by fold, and "useless" type
757 conversions that might now be apparent due to propagation. */
758 STRIP_USELESS_TYPE_CONVERSION (result
);
760 if (result
!= rhs
&& valid_gimple_rhs_p (result
))
767 case GIMPLE_UNARY_RHS
:
769 tree rhs
= gimple_assign_rhs1 (stmt
);
771 result
= fold_unary_loc (loc
, subcode
, gimple_expr_type (stmt
), rhs
);
774 /* If the operation was a conversion do _not_ mark a
775 resulting constant with TREE_OVERFLOW if the original
776 constant was not. These conversions have implementation
777 defined behavior and retaining the TREE_OVERFLOW flag
778 here would confuse later passes such as VRP. */
779 if (CONVERT_EXPR_CODE_P (subcode
)
780 && TREE_CODE (result
) == INTEGER_CST
781 && TREE_CODE (rhs
) == INTEGER_CST
)
782 TREE_OVERFLOW (result
) = TREE_OVERFLOW (rhs
);
784 STRIP_USELESS_TYPE_CONVERSION (result
);
785 if (valid_gimple_rhs_p (result
))
788 else if (CONVERT_EXPR_CODE_P (subcode
)
789 && POINTER_TYPE_P (gimple_expr_type (stmt
))
790 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt
))))
792 tree type
= gimple_expr_type (stmt
);
793 tree t
= maybe_fold_offset_to_address (loc
,
794 gimple_assign_rhs1 (stmt
),
795 integer_zero_node
, type
);
802 case GIMPLE_BINARY_RHS
:
803 /* Try to fold pointer addition. */
804 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
806 tree type
= TREE_TYPE (gimple_assign_rhs1 (stmt
));
807 if (TREE_CODE (TREE_TYPE (type
)) == ARRAY_TYPE
)
809 type
= build_pointer_type (TREE_TYPE (TREE_TYPE (type
)));
810 if (!useless_type_conversion_p
811 (TREE_TYPE (gimple_assign_lhs (stmt
)), type
))
812 type
= TREE_TYPE (gimple_assign_rhs1 (stmt
));
814 result
= maybe_fold_stmt_addition (gimple_location (stmt
),
816 gimple_assign_rhs1 (stmt
),
817 gimple_assign_rhs2 (stmt
));
821 result
= fold_binary_loc (loc
, subcode
,
822 TREE_TYPE (gimple_assign_lhs (stmt
)),
823 gimple_assign_rhs1 (stmt
),
824 gimple_assign_rhs2 (stmt
));
828 STRIP_USELESS_TYPE_CONVERSION (result
);
829 if (valid_gimple_rhs_p (result
))
832 /* Fold might have produced non-GIMPLE, so if we trust it blindly
833 we lose canonicalization opportunities. Do not go again
834 through fold here though, or the same non-GIMPLE will be
836 if (commutative_tree_code (subcode
)
837 && tree_swap_operands_p (gimple_assign_rhs1 (stmt
),
838 gimple_assign_rhs2 (stmt
), false))
839 return build2 (subcode
, TREE_TYPE (gimple_assign_lhs (stmt
)),
840 gimple_assign_rhs2 (stmt
),
841 gimple_assign_rhs1 (stmt
));
845 case GIMPLE_TERNARY_RHS
:
846 result
= fold_ternary_loc (loc
, subcode
,
847 TREE_TYPE (gimple_assign_lhs (stmt
)),
848 gimple_assign_rhs1 (stmt
),
849 gimple_assign_rhs2 (stmt
),
850 gimple_assign_rhs3 (stmt
));
854 STRIP_USELESS_TYPE_CONVERSION (result
);
855 if (valid_gimple_rhs_p (result
))
858 /* Fold might have produced non-GIMPLE, so if we trust it blindly
859 we lose canonicalization opportunities. Do not go again
860 through fold here though, or the same non-GIMPLE will be
862 if (commutative_ternary_tree_code (subcode
)
863 && tree_swap_operands_p (gimple_assign_rhs1 (stmt
),
864 gimple_assign_rhs2 (stmt
), false))
865 return build3 (subcode
, TREE_TYPE (gimple_assign_lhs (stmt
)),
866 gimple_assign_rhs2 (stmt
),
867 gimple_assign_rhs1 (stmt
),
868 gimple_assign_rhs3 (stmt
));
872 case GIMPLE_INVALID_RHS
:
879 /* Attempt to fold a conditional statement. Return true if any changes were
880 made. We only attempt to fold the condition expression, and do not perform
881 any transformation that would require alteration of the cfg. It is
882 assumed that the operands have been previously folded. */
885 fold_gimple_cond (gimple stmt
)
887 tree result
= fold_binary_loc (gimple_location (stmt
),
888 gimple_cond_code (stmt
),
890 gimple_cond_lhs (stmt
),
891 gimple_cond_rhs (stmt
));
895 STRIP_USELESS_TYPE_CONVERSION (result
);
896 if (is_gimple_condexpr (result
) && valid_gimple_rhs_p (result
))
898 gimple_cond_set_condition_from_tree (stmt
, result
);
906 /* Convert EXPR into a GIMPLE value suitable for substitution on the
907 RHS of an assignment. Insert the necessary statements before
908 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
909 is replaced. If the call is expected to produces a result, then it
910 is replaced by an assignment of the new RHS to the result variable.
911 If the result is to be ignored, then the call is replaced by a
912 GIMPLE_NOP. A proper VDEF chain is retained by making the first
913 VUSE and the last VDEF of the whole sequence be the same as the replaced
914 statement and using new SSA names for stores in between. */
917 gimplify_and_update_call_from_tree (gimple_stmt_iterator
*si_p
, tree expr
)
920 tree tmp
= NULL_TREE
; /* Silence warning. */
921 gimple stmt
, new_stmt
;
922 gimple_stmt_iterator i
;
923 gimple_seq stmts
= gimple_seq_alloc();
924 struct gimplify_ctx gctx
;
926 gimple laststore
= NULL
;
929 stmt
= gsi_stmt (*si_p
);
931 gcc_assert (is_gimple_call (stmt
));
933 lhs
= gimple_call_lhs (stmt
);
934 reaching_vuse
= gimple_vuse (stmt
);
936 push_gimplify_context (&gctx
);
938 if (lhs
== NULL_TREE
)
940 gimplify_and_add (expr
, &stmts
);
941 /* We can end up with folding a memcpy of an empty class assignment
942 which gets optimized away by C++ gimplification. */
943 if (gimple_seq_empty_p (stmts
))
945 if (gimple_in_ssa_p (cfun
))
947 unlink_stmt_vdef (stmt
);
950 gsi_remove (si_p
, true);
955 tmp
= get_initialized_tmp_var (expr
, &stmts
, NULL
);
957 pop_gimplify_context (NULL
);
959 if (gimple_has_location (stmt
))
960 annotate_all_with_location (stmts
, gimple_location (stmt
));
962 /* The replacement can expose previously unreferenced variables. */
963 for (i
= gsi_start (stmts
); !gsi_end_p (i
); gsi_next (&i
))
967 gsi_insert_before (si_p
, last
, GSI_NEW_STMT
);
970 new_stmt
= gsi_stmt (i
);
971 if (gimple_in_ssa_p (cfun
))
973 find_new_referenced_vars (new_stmt
);
974 mark_symbols_for_renaming (new_stmt
);
976 /* If the new statement has a VUSE, update it with exact SSA name we
977 know will reach this one. */
978 if (gimple_vuse (new_stmt
))
980 /* If we've also seen a previous store create a new VDEF for
981 the latter one, and make that the new reaching VUSE. */
984 reaching_vuse
= make_ssa_name (gimple_vop (cfun
), laststore
);
985 gimple_set_vdef (laststore
, reaching_vuse
);
986 update_stmt (laststore
);
989 gimple_set_vuse (new_stmt
, reaching_vuse
);
990 gimple_set_modified (new_stmt
, true);
992 if (gimple_assign_single_p (new_stmt
)
993 && !is_gimple_reg (gimple_assign_lhs (new_stmt
)))
995 laststore
= new_stmt
;
1000 if (lhs
== NULL_TREE
)
1002 /* If we replace a call without LHS that has a VDEF and our new
1003 sequence ends with a store we must make that store have the same
1004 vdef in order not to break the sequencing. This can happen
1005 for instance when folding memcpy calls into assignments. */
1006 if (gimple_vdef (stmt
) && laststore
)
1008 gimple_set_vdef (laststore
, gimple_vdef (stmt
));
1009 if (TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
1010 SSA_NAME_DEF_STMT (gimple_vdef (stmt
)) = laststore
;
1011 update_stmt (laststore
);
1013 else if (gimple_in_ssa_p (cfun
))
1015 unlink_stmt_vdef (stmt
);
1016 release_defs (stmt
);
1024 gsi_insert_before (si_p
, last
, GSI_NEW_STMT
);
1027 if (laststore
&& is_gimple_reg (lhs
))
1029 gimple_set_vdef (laststore
, gimple_vdef (stmt
));
1030 update_stmt (laststore
);
1031 if (TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
1032 SSA_NAME_DEF_STMT (gimple_vdef (stmt
)) = laststore
;
1037 reaching_vuse
= make_ssa_name (gimple_vop (cfun
), laststore
);
1038 gimple_set_vdef (laststore
, reaching_vuse
);
1039 update_stmt (laststore
);
1042 new_stmt
= gimple_build_assign (lhs
, tmp
);
1043 if (!is_gimple_reg (tmp
))
1044 gimple_set_vuse (new_stmt
, reaching_vuse
);
1045 if (!is_gimple_reg (lhs
))
1047 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
1048 if (TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
1049 SSA_NAME_DEF_STMT (gimple_vdef (stmt
)) = new_stmt
;
1051 else if (reaching_vuse
== gimple_vuse (stmt
))
1052 unlink_stmt_vdef (stmt
);
1055 gimple_set_location (new_stmt
, gimple_location (stmt
));
1056 gsi_replace (si_p
, new_stmt
, false);
1059 /* Return the string length, maximum string length or maximum value of
1061 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1062 is not NULL and, for TYPE == 0, its value is not equal to the length
1063 we determine or if we are unable to determine the length or value,
1064 return false. VISITED is a bitmap of visited variables.
1065 TYPE is 0 if string length should be returned, 1 for maximum string
1066 length and 2 for maximum value ARG can have. */
1069 get_maxval_strlen (tree arg
, tree
*length
, bitmap visited
, int type
)
1074 if (TREE_CODE (arg
) != SSA_NAME
)
1076 if (TREE_CODE (arg
) == COND_EXPR
)
1077 return get_maxval_strlen (COND_EXPR_THEN (arg
), length
, visited
, type
)
1078 && get_maxval_strlen (COND_EXPR_ELSE (arg
), length
, visited
, type
);
1079 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1080 else if (TREE_CODE (arg
) == ADDR_EXPR
1081 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ARRAY_REF
1082 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg
, 0), 1)))
1084 tree aop0
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
1085 if (TREE_CODE (aop0
) == INDIRECT_REF
1086 && TREE_CODE (TREE_OPERAND (aop0
, 0)) == SSA_NAME
)
1087 return get_maxval_strlen (TREE_OPERAND (aop0
, 0),
1088 length
, visited
, type
);
1094 if (TREE_CODE (val
) != INTEGER_CST
1095 || tree_int_cst_sgn (val
) < 0)
1099 val
= c_strlen (arg
, 1);
1107 if (TREE_CODE (*length
) != INTEGER_CST
1108 || TREE_CODE (val
) != INTEGER_CST
)
1111 if (tree_int_cst_lt (*length
, val
))
1115 else if (simple_cst_equal (val
, *length
) != 1)
1123 /* If we were already here, break the infinite cycle. */
1124 if (!bitmap_set_bit (visited
, SSA_NAME_VERSION (arg
)))
1128 def_stmt
= SSA_NAME_DEF_STMT (var
);
1130 switch (gimple_code (def_stmt
))
1133 /* The RHS of the statement defining VAR must either have a
1134 constant length or come from another SSA_NAME with a constant
1136 if (gimple_assign_single_p (def_stmt
)
1137 || gimple_assign_unary_nop_p (def_stmt
))
1139 tree rhs
= gimple_assign_rhs1 (def_stmt
);
1140 return get_maxval_strlen (rhs
, length
, visited
, type
);
1146 /* All the arguments of the PHI node must have the same constant
1150 for (i
= 0; i
< gimple_phi_num_args (def_stmt
); i
++)
1152 tree arg
= gimple_phi_arg (def_stmt
, i
)->def
;
1154 /* If this PHI has itself as an argument, we cannot
1155 determine the string length of this argument. However,
1156 if we can find a constant string length for the other
1157 PHI args then we can still be sure that this is a
1158 constant string length. So be optimistic and just
1159 continue with the next argument. */
1160 if (arg
== gimple_phi_result (def_stmt
))
1163 if (!get_maxval_strlen (arg
, length
, visited
, type
))
1175 /* Fold builtin call in statement STMT. Returns a simplified tree.
1176 We may return a non-constant expression, including another call
1177 to a different function and with different arguments, e.g.,
1178 substituting memcpy for strcpy when the string length is known.
1179 Note that some builtins expand into inline code that may not
1180 be valid in GIMPLE. Callers must take care. */
1183 gimple_fold_builtin (gimple stmt
)
1185 tree result
, val
[3];
1191 location_t loc
= gimple_location (stmt
);
1193 gcc_assert (is_gimple_call (stmt
));
1195 ignore
= (gimple_call_lhs (stmt
) == NULL
);
1197 /* First try the generic builtin folder. If that succeeds, return the
1199 result
= fold_call_stmt (stmt
, ignore
);
1203 STRIP_NOPS (result
);
1207 /* Ignore MD builtins. */
1208 callee
= gimple_call_fndecl (stmt
);
1209 if (DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_MD
)
1212 /* If the builtin could not be folded, and it has no argument list,
1214 nargs
= gimple_call_num_args (stmt
);
1218 /* Limit the work only for builtins we know how to simplify. */
1219 switch (DECL_FUNCTION_CODE (callee
))
1221 case BUILT_IN_STRLEN
:
1222 case BUILT_IN_FPUTS
:
1223 case BUILT_IN_FPUTS_UNLOCKED
:
1227 case BUILT_IN_STRCPY
:
1228 case BUILT_IN_STRNCPY
:
1232 case BUILT_IN_MEMCPY_CHK
:
1233 case BUILT_IN_MEMPCPY_CHK
:
1234 case BUILT_IN_MEMMOVE_CHK
:
1235 case BUILT_IN_MEMSET_CHK
:
1236 case BUILT_IN_STRNCPY_CHK
:
1240 case BUILT_IN_STRCPY_CHK
:
1241 case BUILT_IN_STPCPY_CHK
:
1245 case BUILT_IN_SNPRINTF_CHK
:
1246 case BUILT_IN_VSNPRINTF_CHK
:
1254 if (arg_idx
>= nargs
)
1257 /* Try to use the dataflow information gathered by the CCP process. */
1258 visited
= BITMAP_ALLOC (NULL
);
1259 bitmap_clear (visited
);
1261 memset (val
, 0, sizeof (val
));
1262 a
= gimple_call_arg (stmt
, arg_idx
);
1263 if (!get_maxval_strlen (a
, &val
[arg_idx
], visited
, type
))
1264 val
[arg_idx
] = NULL_TREE
;
1266 BITMAP_FREE (visited
);
1269 switch (DECL_FUNCTION_CODE (callee
))
1271 case BUILT_IN_STRLEN
:
1272 if (val
[0] && nargs
== 1)
1275 fold_convert (TREE_TYPE (gimple_call_lhs (stmt
)), val
[0]);
1277 /* If the result is not a valid gimple value, or not a cast
1278 of a valid gimple value, then we cannot use the result. */
1279 if (is_gimple_val (new_val
)
1280 || (CONVERT_EXPR_P (new_val
)
1281 && is_gimple_val (TREE_OPERAND (new_val
, 0))))
1286 case BUILT_IN_STRCPY
:
1287 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 2)
1288 result
= fold_builtin_strcpy (loc
, callee
,
1289 gimple_call_arg (stmt
, 0),
1290 gimple_call_arg (stmt
, 1),
1294 case BUILT_IN_STRNCPY
:
1295 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 3)
1296 result
= fold_builtin_strncpy (loc
, callee
,
1297 gimple_call_arg (stmt
, 0),
1298 gimple_call_arg (stmt
, 1),
1299 gimple_call_arg (stmt
, 2),
1303 case BUILT_IN_FPUTS
:
1305 result
= fold_builtin_fputs (loc
, gimple_call_arg (stmt
, 0),
1306 gimple_call_arg (stmt
, 1),
1307 ignore
, false, val
[0]);
1310 case BUILT_IN_FPUTS_UNLOCKED
:
1312 result
= fold_builtin_fputs (loc
, gimple_call_arg (stmt
, 0),
1313 gimple_call_arg (stmt
, 1),
1314 ignore
, true, val
[0]);
1317 case BUILT_IN_MEMCPY_CHK
:
1318 case BUILT_IN_MEMPCPY_CHK
:
1319 case BUILT_IN_MEMMOVE_CHK
:
1320 case BUILT_IN_MEMSET_CHK
:
1321 if (val
[2] && is_gimple_val (val
[2]) && nargs
== 4)
1322 result
= fold_builtin_memory_chk (loc
, callee
,
1323 gimple_call_arg (stmt
, 0),
1324 gimple_call_arg (stmt
, 1),
1325 gimple_call_arg (stmt
, 2),
1326 gimple_call_arg (stmt
, 3),
1328 DECL_FUNCTION_CODE (callee
));
1331 case BUILT_IN_STRCPY_CHK
:
1332 case BUILT_IN_STPCPY_CHK
:
1333 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 3)
1334 result
= fold_builtin_stxcpy_chk (loc
, callee
,
1335 gimple_call_arg (stmt
, 0),
1336 gimple_call_arg (stmt
, 1),
1337 gimple_call_arg (stmt
, 2),
1339 DECL_FUNCTION_CODE (callee
));
1342 case BUILT_IN_STRNCPY_CHK
:
1343 if (val
[2] && is_gimple_val (val
[2]) && nargs
== 4)
1344 result
= fold_builtin_strncpy_chk (loc
, gimple_call_arg (stmt
, 0),
1345 gimple_call_arg (stmt
, 1),
1346 gimple_call_arg (stmt
, 2),
1347 gimple_call_arg (stmt
, 3),
1351 case BUILT_IN_SNPRINTF_CHK
:
1352 case BUILT_IN_VSNPRINTF_CHK
:
1353 if (val
[1] && is_gimple_val (val
[1]))
1354 result
= gimple_fold_builtin_snprintf_chk (stmt
, val
[1],
1355 DECL_FUNCTION_CODE (callee
));
1362 if (result
&& ignore
)
1363 result
= fold_ignored_result (result
);
1367 /* Search for a base binfo of BINFO that corresponds to TYPE and return it if
1368 it is found or NULL_TREE if it is not. */
1371 get_base_binfo_for_type (tree binfo
, tree type
)
1375 tree res
= NULL_TREE
;
1377 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
1378 if (TREE_TYPE (base_binfo
) == type
)
1387 /* Return a binfo describing the part of object referenced by expression REF.
1388 Return NULL_TREE if it cannot be determined. REF can consist of a series of
1389 COMPONENT_REFs of a declaration or of an INDIRECT_REF or it can also be just
1390 a simple declaration, indirect reference or an SSA_NAME. If the function
1391 discovers an INDIRECT_REF or an SSA_NAME, it will assume that the
1392 encapsulating type is described by KNOWN_BINFO, if it is not NULL_TREE.
1393 Otherwise the first non-artificial field declaration or the base declaration
1394 will be examined to get the encapsulating type. */
1397 gimple_get_relevant_ref_binfo (tree ref
, tree known_binfo
)
1401 if (TREE_CODE (ref
) == COMPONENT_REF
)
1405 tree field
= TREE_OPERAND (ref
, 1);
1407 if (!DECL_ARTIFICIAL (field
))
1409 tree type
= TREE_TYPE (field
);
1410 if (TREE_CODE (type
) == RECORD_TYPE
)
1411 return TYPE_BINFO (type
);
1416 par_type
= TREE_TYPE (TREE_OPERAND (ref
, 0));
1417 binfo
= TYPE_BINFO (par_type
);
1419 || BINFO_N_BASE_BINFOS (binfo
) == 0)
1422 /* Offset 0 indicates the primary base, whose vtable contents are
1423 represented in the binfo for the derived class. */
1424 if (int_bit_position (field
) != 0)
1428 /* Get descendant binfo. */
1429 d_binfo
= gimple_get_relevant_ref_binfo (TREE_OPERAND (ref
, 0),
1433 return get_base_binfo_for_type (d_binfo
, TREE_TYPE (field
));
1436 ref
= TREE_OPERAND (ref
, 0);
1438 else if (DECL_P (ref
) && TREE_CODE (TREE_TYPE (ref
)) == RECORD_TYPE
)
1439 return TYPE_BINFO (TREE_TYPE (ref
));
1440 else if (known_binfo
1441 && (TREE_CODE (ref
) == SSA_NAME
1442 || TREE_CODE (ref
) == MEM_REF
))
1449 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
1450 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
1451 KNOWN_BINFO carries the binfo describing the true type of
1452 OBJ_TYPE_REF_OBJECT(REF). If a call to the function must be accompanied
1453 with a this adjustment, the constant which should be added to this pointer
1454 is stored to *DELTA. If REFUSE_THUNKS is true, return NULL if the function
1455 is a thunk (other than a this adjustment which is dealt with by DELTA). */
1458 gimple_get_virt_mehtod_for_binfo (HOST_WIDE_INT token
, tree known_binfo
,
1459 tree
*delta
, bool refuse_thunks
)
1463 struct cgraph_node
*node
;
1465 v
= BINFO_VIRTUALS (known_binfo
);
1466 /* If there is no virtual methods leave the OBJ_TYPE_REF alone. */
1472 i
+= (TARGET_VTABLE_USES_DESCRIPTORS
1473 ? TARGET_VTABLE_USES_DESCRIPTORS
: 1);
1477 fndecl
= TREE_VALUE (v
);
1478 node
= cgraph_get_node_or_alias (fndecl
);
1481 /* Bail out if it is a thunk declaration. Since simple this_adjusting
1482 thunks are represented by a constant in TREE_PURPOSE of items in
1483 BINFO_VIRTUALS, this is a more complicate type which we cannot handle as
1486 FIXME: Remove the following condition once we are able to represent
1487 thunk information on call graph edges. */
1488 || (node
->same_body_alias
&& node
->thunk
.thunk_p
)))
1491 /* When cgraph node is missing and function is not public, we cannot
1492 devirtualize. This can happen in WHOPR when the actual method
1493 ends up in other partition, because we found devirtualization
1494 possibility too late. */
1495 if (!can_refer_decl_in_current_unit_p (TREE_VALUE (v
)))
1498 *delta
= TREE_PURPOSE (v
);
1499 gcc_checking_assert (host_integerp (*delta
, 0));
1503 /* Generate code adjusting the first parameter of a call statement determined
1507 gimple_adjust_this_by_delta (gimple_stmt_iterator
*gsi
, tree delta
)
1509 gimple call_stmt
= gsi_stmt (*gsi
);
1513 delta
= fold_convert (sizetype
, delta
);
1514 gcc_assert (gimple_call_num_args (call_stmt
) >= 1);
1515 parm
= gimple_call_arg (call_stmt
, 0);
1516 gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm
)));
1517 tmp
= create_tmp_var (TREE_TYPE (parm
), NULL
);
1518 add_referenced_var (tmp
);
1520 tmp
= make_ssa_name (tmp
, NULL
);
1521 new_stmt
= gimple_build_assign_with_ops (POINTER_PLUS_EXPR
, tmp
, parm
, delta
);
1522 SSA_NAME_DEF_STMT (tmp
) = new_stmt
;
1523 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1524 gimple_call_set_arg (call_stmt
, 0, tmp
);
1527 /* Fold a call statement to OBJ_TYPE_REF to a direct call, if possible. GSI
1528 determines the statement, generating new statements is allowed only if
1529 INPLACE is false. Return true iff the statement was changed. */
1532 gimple_fold_obj_type_ref_call (gimple_stmt_iterator
*gsi
, bool inplace
)
1534 gimple stmt
= gsi_stmt (*gsi
);
1535 tree ref
= gimple_call_fn (stmt
);
1536 tree obj
= OBJ_TYPE_REF_OBJECT (ref
);
1537 tree binfo
, fndecl
, delta
;
1538 HOST_WIDE_INT token
;
1540 if (TREE_CODE (obj
) == ADDR_EXPR
)
1541 obj
= TREE_OPERAND (obj
, 0);
1545 binfo
= gimple_get_relevant_ref_binfo (obj
, NULL_TREE
);
1548 token
= tree_low_cst (OBJ_TYPE_REF_TOKEN (ref
), 1);
1549 fndecl
= gimple_get_virt_mehtod_for_binfo (token
, binfo
, &delta
,
1554 if (integer_nonzerop (delta
))
1558 gimple_adjust_this_by_delta (gsi
, delta
);
1561 gimple_call_set_fndecl (stmt
, fndecl
);
1565 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1566 The statement may be replaced by another statement, e.g., if the call
1567 simplifies to a constant value. Return true if any changes were made.
1568 It is assumed that the operands have been previously folded. */
1571 gimple_fold_call (gimple_stmt_iterator
*gsi
, bool inplace
)
1573 gimple stmt
= gsi_stmt (*gsi
);
1575 tree callee
= gimple_call_fndecl (stmt
);
1577 /* Check for builtins that CCP can handle using information not
1578 available in the generic fold routines. */
1579 if (!inplace
&& callee
&& DECL_BUILT_IN (callee
))
1581 tree result
= gimple_fold_builtin (stmt
);
1585 if (!update_call_from_tree (gsi
, result
))
1586 gimplify_and_update_call_from_tree (gsi
, result
);
1592 /* ??? Should perhaps do this in fold proper. However, doing it
1593 there requires that we create a new CALL_EXPR, and that requires
1594 copying EH region info to the new node. Easier to just do it
1595 here where we can just smash the call operand. */
1596 callee
= gimple_call_fn (stmt
);
1597 if (TREE_CODE (callee
) == OBJ_TYPE_REF
)
1598 return gimple_fold_obj_type_ref_call (gsi
, inplace
);
1604 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1605 distinguishes both cases. */
1608 fold_stmt_1 (gimple_stmt_iterator
*gsi
, bool inplace
)
1610 bool changed
= false;
1611 gimple stmt
= gsi_stmt (*gsi
);
1614 /* Fold the main computation performed by the statement. */
1615 switch (gimple_code (stmt
))
1619 unsigned old_num_ops
= gimple_num_ops (stmt
);
1620 tree new_rhs
= fold_gimple_assign (gsi
);
1621 tree lhs
= gimple_assign_lhs (stmt
);
1623 && !useless_type_conversion_p (TREE_TYPE (lhs
),
1624 TREE_TYPE (new_rhs
)))
1625 new_rhs
= fold_convert (TREE_TYPE (lhs
), new_rhs
);
1628 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs
)) < old_num_ops
))
1630 gimple_assign_set_rhs_from_tree (gsi
, new_rhs
);
1637 changed
|= fold_gimple_cond (stmt
);
1641 /* Fold *& in call arguments. */
1642 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
1643 if (REFERENCE_CLASS_P (gimple_call_arg (stmt
, i
)))
1645 tree tmp
= maybe_fold_reference (gimple_call_arg (stmt
, i
), false);
1648 gimple_call_set_arg (stmt
, i
, tmp
);
1652 changed
|= gimple_fold_call (gsi
, inplace
);
1656 /* Fold *& in asm operands. */
1657 for (i
= 0; i
< gimple_asm_noutputs (stmt
); ++i
)
1659 tree link
= gimple_asm_output_op (stmt
, i
);
1660 tree op
= TREE_VALUE (link
);
1661 if (REFERENCE_CLASS_P (op
)
1662 && (op
= maybe_fold_reference (op
, true)) != NULL_TREE
)
1664 TREE_VALUE (link
) = op
;
1668 for (i
= 0; i
< gimple_asm_ninputs (stmt
); ++i
)
1670 tree link
= gimple_asm_input_op (stmt
, i
);
1671 tree op
= TREE_VALUE (link
);
1672 if (REFERENCE_CLASS_P (op
)
1673 && (op
= maybe_fold_reference (op
, false)) != NULL_TREE
)
1675 TREE_VALUE (link
) = op
;
1682 if (gimple_debug_bind_p (stmt
))
1684 tree val
= gimple_debug_bind_get_value (stmt
);
1686 && REFERENCE_CLASS_P (val
))
1688 tree tem
= maybe_fold_reference (val
, false);
1691 gimple_debug_bind_set_value (stmt
, tem
);
1701 stmt
= gsi_stmt (*gsi
);
1703 /* Fold *& on the lhs. */
1704 if (gimple_has_lhs (stmt
))
1706 tree lhs
= gimple_get_lhs (stmt
);
1707 if (lhs
&& REFERENCE_CLASS_P (lhs
))
1709 tree new_lhs
= maybe_fold_reference (lhs
, true);
1712 gimple_set_lhs (stmt
, new_lhs
);
1721 /* Fold the statement pointed to by GSI. In some cases, this function may
1722 replace the whole statement with a new one. Returns true iff folding
1724 The statement pointed to by GSI should be in valid gimple form but may
1725 be in unfolded state as resulting from for example constant propagation
1726 which can produce *&x = 0. */
1729 fold_stmt (gimple_stmt_iterator
*gsi
)
1731 return fold_stmt_1 (gsi
, false);
1734 /* Perform the minimal folding on statement STMT. Only operations like
1735 *&x created by constant propagation are handled. The statement cannot
1736 be replaced with a new one. Return true if the statement was
1737 changed, false otherwise.
1738 The statement STMT should be in valid gimple form but may
1739 be in unfolded state as resulting from for example constant propagation
1740 which can produce *&x = 0. */
1743 fold_stmt_inplace (gimple stmt
)
1745 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
1746 bool changed
= fold_stmt_1 (&gsi
, true);
1747 gcc_assert (gsi_stmt (gsi
) == stmt
);
1751 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1752 if EXPR is null or we don't know how.
1753 If non-null, the result always has boolean type. */
1756 canonicalize_bool (tree expr
, bool invert
)
1762 if (integer_nonzerop (expr
))
1763 return boolean_false_node
;
1764 else if (integer_zerop (expr
))
1765 return boolean_true_node
;
1766 else if (TREE_CODE (expr
) == SSA_NAME
)
1767 return fold_build2 (EQ_EXPR
, boolean_type_node
, expr
,
1768 build_int_cst (TREE_TYPE (expr
), 0));
1769 else if (TREE_CODE_CLASS (TREE_CODE (expr
)) == tcc_comparison
)
1770 return fold_build2 (invert_tree_comparison (TREE_CODE (expr
), false),
1772 TREE_OPERAND (expr
, 0),
1773 TREE_OPERAND (expr
, 1));
1779 if (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
1781 if (integer_nonzerop (expr
))
1782 return boolean_true_node
;
1783 else if (integer_zerop (expr
))
1784 return boolean_false_node
;
1785 else if (TREE_CODE (expr
) == SSA_NAME
)
1786 return fold_build2 (NE_EXPR
, boolean_type_node
, expr
,
1787 build_int_cst (TREE_TYPE (expr
), 0));
1788 else if (TREE_CODE_CLASS (TREE_CODE (expr
)) == tcc_comparison
)
1789 return fold_build2 (TREE_CODE (expr
),
1791 TREE_OPERAND (expr
, 0),
1792 TREE_OPERAND (expr
, 1));
1798 /* Check to see if a boolean expression EXPR is logically equivalent to the
1799 comparison (OP1 CODE OP2). Check for various identities involving
1803 same_bool_comparison_p (const_tree expr
, enum tree_code code
,
1804 const_tree op1
, const_tree op2
)
1808 /* The obvious case. */
1809 if (TREE_CODE (expr
) == code
1810 && operand_equal_p (TREE_OPERAND (expr
, 0), op1
, 0)
1811 && operand_equal_p (TREE_OPERAND (expr
, 1), op2
, 0))
1814 /* Check for comparing (name, name != 0) and the case where expr
1815 is an SSA_NAME with a definition matching the comparison. */
1816 if (TREE_CODE (expr
) == SSA_NAME
1817 && TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
1819 if (operand_equal_p (expr
, op1
, 0))
1820 return ((code
== NE_EXPR
&& integer_zerop (op2
))
1821 || (code
== EQ_EXPR
&& integer_nonzerop (op2
)));
1822 s
= SSA_NAME_DEF_STMT (expr
);
1823 if (is_gimple_assign (s
)
1824 && gimple_assign_rhs_code (s
) == code
1825 && operand_equal_p (gimple_assign_rhs1 (s
), op1
, 0)
1826 && operand_equal_p (gimple_assign_rhs2 (s
), op2
, 0))
1830 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1831 of name is a comparison, recurse. */
1832 if (TREE_CODE (op1
) == SSA_NAME
1833 && TREE_CODE (TREE_TYPE (op1
)) == BOOLEAN_TYPE
)
1835 s
= SSA_NAME_DEF_STMT (op1
);
1836 if (is_gimple_assign (s
)
1837 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
)
1839 enum tree_code c
= gimple_assign_rhs_code (s
);
1840 if ((c
== NE_EXPR
&& integer_zerop (op2
))
1841 || (c
== EQ_EXPR
&& integer_nonzerop (op2
)))
1842 return same_bool_comparison_p (expr
, c
,
1843 gimple_assign_rhs1 (s
),
1844 gimple_assign_rhs2 (s
));
1845 if ((c
== EQ_EXPR
&& integer_zerop (op2
))
1846 || (c
== NE_EXPR
&& integer_nonzerop (op2
)))
1847 return same_bool_comparison_p (expr
,
1848 invert_tree_comparison (c
, false),
1849 gimple_assign_rhs1 (s
),
1850 gimple_assign_rhs2 (s
));
1856 /* Check to see if two boolean expressions OP1 and OP2 are logically
1860 same_bool_result_p (const_tree op1
, const_tree op2
)
1862 /* Simple cases first. */
1863 if (operand_equal_p (op1
, op2
, 0))
1866 /* Check the cases where at least one of the operands is a comparison.
1867 These are a bit smarter than operand_equal_p in that they apply some
1868 identifies on SSA_NAMEs. */
1869 if (TREE_CODE_CLASS (TREE_CODE (op2
)) == tcc_comparison
1870 && same_bool_comparison_p (op1
, TREE_CODE (op2
),
1871 TREE_OPERAND (op2
, 0),
1872 TREE_OPERAND (op2
, 1)))
1874 if (TREE_CODE_CLASS (TREE_CODE (op1
)) == tcc_comparison
1875 && same_bool_comparison_p (op2
, TREE_CODE (op1
),
1876 TREE_OPERAND (op1
, 0),
1877 TREE_OPERAND (op1
, 1)))
1884 /* Forward declarations for some mutually recursive functions. */
1887 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
1888 enum tree_code code2
, tree op2a
, tree op2b
);
1890 and_var_with_comparison (tree var
, bool invert
,
1891 enum tree_code code2
, tree op2a
, tree op2b
);
1893 and_var_with_comparison_1 (gimple stmt
,
1894 enum tree_code code2
, tree op2a
, tree op2b
);
1896 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
1897 enum tree_code code2
, tree op2a
, tree op2b
);
1899 or_var_with_comparison (tree var
, bool invert
,
1900 enum tree_code code2
, tree op2a
, tree op2b
);
1902 or_var_with_comparison_1 (gimple stmt
,
1903 enum tree_code code2
, tree op2a
, tree op2b
);
1905 /* Helper function for and_comparisons_1: try to simplify the AND of the
1906 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1907 If INVERT is true, invert the value of the VAR before doing the AND.
1908 Return NULL_EXPR if we can't simplify this to a single expression. */
1911 and_var_with_comparison (tree var
, bool invert
,
1912 enum tree_code code2
, tree op2a
, tree op2b
)
1915 gimple stmt
= SSA_NAME_DEF_STMT (var
);
1917 /* We can only deal with variables whose definitions are assignments. */
1918 if (!is_gimple_assign (stmt
))
1921 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1922 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1923 Then we only have to consider the simpler non-inverted cases. */
1925 t
= or_var_with_comparison_1 (stmt
,
1926 invert_tree_comparison (code2
, false),
1929 t
= and_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
1930 return canonicalize_bool (t
, invert
);
1933 /* Try to simplify the AND of the ssa variable defined by the assignment
1934 STMT with the comparison specified by (OP2A CODE2 OP2B).
1935 Return NULL_EXPR if we can't simplify this to a single expression. */
1938 and_var_with_comparison_1 (gimple stmt
,
1939 enum tree_code code2
, tree op2a
, tree op2b
)
1941 tree var
= gimple_assign_lhs (stmt
);
1942 tree true_test_var
= NULL_TREE
;
1943 tree false_test_var
= NULL_TREE
;
1944 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
1946 /* Check for identities like (var AND (var == 0)) => false. */
1947 if (TREE_CODE (op2a
) == SSA_NAME
1948 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
1950 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
1951 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
1953 true_test_var
= op2a
;
1954 if (var
== true_test_var
)
1957 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
1958 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
1960 false_test_var
= op2a
;
1961 if (var
== false_test_var
)
1962 return boolean_false_node
;
1966 /* If the definition is a comparison, recurse on it. */
1967 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
1969 tree t
= and_comparisons_1 (innercode
,
1970 gimple_assign_rhs1 (stmt
),
1971 gimple_assign_rhs2 (stmt
),
1979 /* If the definition is an AND or OR expression, we may be able to
1980 simplify by reassociating. */
1981 if (innercode
== TRUTH_AND_EXPR
1982 || innercode
== TRUTH_OR_EXPR
1983 || (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
1984 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
)))
1986 tree inner1
= gimple_assign_rhs1 (stmt
);
1987 tree inner2
= gimple_assign_rhs2 (stmt
);
1990 tree partial
= NULL_TREE
;
1991 bool is_and
= (innercode
== TRUTH_AND_EXPR
|| innercode
== BIT_AND_EXPR
);
1993 /* Check for boolean identities that don't require recursive examination
1995 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1996 inner1 AND (inner1 OR inner2) => inner1
1997 !inner1 AND (inner1 AND inner2) => false
1998 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1999 Likewise for similar cases involving inner2. */
2000 if (inner1
== true_test_var
)
2001 return (is_and
? var
: inner1
);
2002 else if (inner2
== true_test_var
)
2003 return (is_and
? var
: inner2
);
2004 else if (inner1
== false_test_var
)
2006 ? boolean_false_node
2007 : and_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
2008 else if (inner2
== false_test_var
)
2010 ? boolean_false_node
2011 : and_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
2013 /* Next, redistribute/reassociate the AND across the inner tests.
2014 Compute the first partial result, (inner1 AND (op2a code op2b)) */
2015 if (TREE_CODE (inner1
) == SSA_NAME
2016 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
2017 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2018 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
2019 gimple_assign_rhs1 (s
),
2020 gimple_assign_rhs2 (s
),
2021 code2
, op2a
, op2b
)))
2023 /* Handle the AND case, where we are reassociating:
2024 (inner1 AND inner2) AND (op2a code2 op2b)
2026 If the partial result t is a constant, we win. Otherwise
2027 continue on to try reassociating with the other inner test. */
2030 if (integer_onep (t
))
2032 else if (integer_zerop (t
))
2033 return boolean_false_node
;
2036 /* Handle the OR case, where we are redistributing:
2037 (inner1 OR inner2) AND (op2a code2 op2b)
2038 => (t OR (inner2 AND (op2a code2 op2b))) */
2039 else if (integer_onep (t
))
2040 return boolean_true_node
;
2042 /* Save partial result for later. */
2046 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
2047 if (TREE_CODE (inner2
) == SSA_NAME
2048 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
2049 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2050 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
2051 gimple_assign_rhs1 (s
),
2052 gimple_assign_rhs2 (s
),
2053 code2
, op2a
, op2b
)))
2055 /* Handle the AND case, where we are reassociating:
2056 (inner1 AND inner2) AND (op2a code2 op2b)
2057 => (inner1 AND t) */
2060 if (integer_onep (t
))
2062 else if (integer_zerop (t
))
2063 return boolean_false_node
;
2064 /* If both are the same, we can apply the identity
2066 else if (partial
&& same_bool_result_p (t
, partial
))
2070 /* Handle the OR case. where we are redistributing:
2071 (inner1 OR inner2) AND (op2a code2 op2b)
2072 => (t OR (inner1 AND (op2a code2 op2b)))
2073 => (t OR partial) */
2076 if (integer_onep (t
))
2077 return boolean_true_node
;
2080 /* We already got a simplification for the other
2081 operand to the redistributed OR expression. The
2082 interesting case is when at least one is false.
2083 Or, if both are the same, we can apply the identity
2085 if (integer_zerop (partial
))
2087 else if (integer_zerop (t
))
2089 else if (same_bool_result_p (t
, partial
))
2098 /* Try to simplify the AND of two comparisons defined by
2099 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2100 If this can be done without constructing an intermediate value,
2101 return the resulting tree; otherwise NULL_TREE is returned.
2102 This function is deliberately asymmetric as it recurses on SSA_DEFs
2103 in the first comparison but not the second. */
2106 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
2107 enum tree_code code2
, tree op2a
, tree op2b
)
2109 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
2110 if (operand_equal_p (op1a
, op2a
, 0)
2111 && operand_equal_p (op1b
, op2b
, 0))
2113 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2114 TRUTH_ANDIF_EXPR
, code1
, code2
,
2115 boolean_type_node
, op1a
, op1b
);
2120 /* Likewise the swapped case of the above. */
2121 if (operand_equal_p (op1a
, op2b
, 0)
2122 && operand_equal_p (op1b
, op2a
, 0))
2124 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2125 TRUTH_ANDIF_EXPR
, code1
,
2126 swap_tree_comparison (code2
),
2127 boolean_type_node
, op1a
, op1b
);
2132 /* If both comparisons are of the same value against constants, we might
2133 be able to merge them. */
2134 if (operand_equal_p (op1a
, op2a
, 0)
2135 && TREE_CODE (op1b
) == INTEGER_CST
2136 && TREE_CODE (op2b
) == INTEGER_CST
)
2138 int cmp
= tree_int_cst_compare (op1b
, op2b
);
2140 /* If we have (op1a == op1b), we should either be able to
2141 return that or FALSE, depending on whether the constant op1b
2142 also satisfies the other comparison against op2b. */
2143 if (code1
== EQ_EXPR
)
2149 case EQ_EXPR
: val
= (cmp
== 0); break;
2150 case NE_EXPR
: val
= (cmp
!= 0); break;
2151 case LT_EXPR
: val
= (cmp
< 0); break;
2152 case GT_EXPR
: val
= (cmp
> 0); break;
2153 case LE_EXPR
: val
= (cmp
<= 0); break;
2154 case GE_EXPR
: val
= (cmp
>= 0); break;
2155 default: done
= false;
2160 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2162 return boolean_false_node
;
2165 /* Likewise if the second comparison is an == comparison. */
2166 else if (code2
== EQ_EXPR
)
2172 case EQ_EXPR
: val
= (cmp
== 0); break;
2173 case NE_EXPR
: val
= (cmp
!= 0); break;
2174 case LT_EXPR
: val
= (cmp
> 0); break;
2175 case GT_EXPR
: val
= (cmp
< 0); break;
2176 case LE_EXPR
: val
= (cmp
>= 0); break;
2177 case GE_EXPR
: val
= (cmp
<= 0); break;
2178 default: done
= false;
2183 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2185 return boolean_false_node
;
2189 /* Same business with inequality tests. */
2190 else if (code1
== NE_EXPR
)
2195 case EQ_EXPR
: val
= (cmp
!= 0); break;
2196 case NE_EXPR
: val
= (cmp
== 0); break;
2197 case LT_EXPR
: val
= (cmp
>= 0); break;
2198 case GT_EXPR
: val
= (cmp
<= 0); break;
2199 case LE_EXPR
: val
= (cmp
> 0); break;
2200 case GE_EXPR
: val
= (cmp
< 0); break;
2205 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2207 else if (code2
== NE_EXPR
)
2212 case EQ_EXPR
: val
= (cmp
== 0); break;
2213 case NE_EXPR
: val
= (cmp
!= 0); break;
2214 case LT_EXPR
: val
= (cmp
<= 0); break;
2215 case GT_EXPR
: val
= (cmp
>= 0); break;
2216 case LE_EXPR
: val
= (cmp
< 0); break;
2217 case GE_EXPR
: val
= (cmp
> 0); break;
2222 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2225 /* Chose the more restrictive of two < or <= comparisons. */
2226 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
2227 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
2229 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
2230 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2232 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2235 /* Likewise chose the more restrictive of two > or >= comparisons. */
2236 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
2237 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
2239 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
2240 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2242 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2245 /* Check for singleton ranges. */
2247 && ((code1
== LE_EXPR
&& code2
== GE_EXPR
)
2248 || (code1
== GE_EXPR
&& code2
== LE_EXPR
)))
2249 return fold_build2 (EQ_EXPR
, boolean_type_node
, op1a
, op2b
);
2251 /* Check for disjoint ranges. */
2253 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
2254 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
2255 return boolean_false_node
;
2257 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
2258 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
2259 return boolean_false_node
;
2262 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2263 NAME's definition is a truth value. See if there are any simplifications
2264 that can be done against the NAME's definition. */
2265 if (TREE_CODE (op1a
) == SSA_NAME
2266 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
2267 && (integer_zerop (op1b
) || integer_onep (op1b
)))
2269 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
2270 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
2271 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
2272 switch (gimple_code (stmt
))
2275 /* Try to simplify by copy-propagating the definition. */
2276 return and_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
2279 /* If every argument to the PHI produces the same result when
2280 ANDed with the second comparison, we win.
2281 Do not do this unless the type is bool since we need a bool
2282 result here anyway. */
2283 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
2285 tree result
= NULL_TREE
;
2287 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
2289 tree arg
= gimple_phi_arg_def (stmt
, i
);
2291 /* If this PHI has itself as an argument, ignore it.
2292 If all the other args produce the same result,
2294 if (arg
== gimple_phi_result (stmt
))
2296 else if (TREE_CODE (arg
) == INTEGER_CST
)
2298 if (invert
? integer_nonzerop (arg
) : integer_zerop (arg
))
2301 result
= boolean_false_node
;
2302 else if (!integer_zerop (result
))
2306 result
= fold_build2 (code2
, boolean_type_node
,
2308 else if (!same_bool_comparison_p (result
,
2312 else if (TREE_CODE (arg
) == SSA_NAME
)
2314 tree temp
= and_var_with_comparison (arg
, invert
,
2320 else if (!same_bool_result_p (result
, temp
))
2336 /* Try to simplify the AND of two comparisons, specified by
2337 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2338 If this can be simplified to a single expression (without requiring
2339 introducing more SSA variables to hold intermediate values),
2340 return the resulting tree. Otherwise return NULL_TREE.
2341 If the result expression is non-null, it has boolean type. */
2344 maybe_fold_and_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
2345 enum tree_code code2
, tree op2a
, tree op2b
)
2347 tree t
= and_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
2351 return and_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
2354 /* Helper function for or_comparisons_1: try to simplify the OR of the
2355 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2356 If INVERT is true, invert the value of VAR before doing the OR.
2357 Return NULL_EXPR if we can't simplify this to a single expression. */
2360 or_var_with_comparison (tree var
, bool invert
,
2361 enum tree_code code2
, tree op2a
, tree op2b
)
2364 gimple stmt
= SSA_NAME_DEF_STMT (var
);
2366 /* We can only deal with variables whose definitions are assignments. */
2367 if (!is_gimple_assign (stmt
))
2370 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2371 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2372 Then we only have to consider the simpler non-inverted cases. */
2374 t
= and_var_with_comparison_1 (stmt
,
2375 invert_tree_comparison (code2
, false),
2378 t
= or_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
2379 return canonicalize_bool (t
, invert
);
2382 /* Try to simplify the OR of the ssa variable defined by the assignment
2383 STMT with the comparison specified by (OP2A CODE2 OP2B).
2384 Return NULL_EXPR if we can't simplify this to a single expression. */
2387 or_var_with_comparison_1 (gimple stmt
,
2388 enum tree_code code2
, tree op2a
, tree op2b
)
2390 tree var
= gimple_assign_lhs (stmt
);
2391 tree true_test_var
= NULL_TREE
;
2392 tree false_test_var
= NULL_TREE
;
2393 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
2395 /* Check for identities like (var OR (var != 0)) => true . */
2396 if (TREE_CODE (op2a
) == SSA_NAME
2397 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
2399 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
2400 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
2402 true_test_var
= op2a
;
2403 if (var
== true_test_var
)
2406 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
2407 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
2409 false_test_var
= op2a
;
2410 if (var
== false_test_var
)
2411 return boolean_true_node
;
2415 /* If the definition is a comparison, recurse on it. */
2416 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
2418 tree t
= or_comparisons_1 (innercode
,
2419 gimple_assign_rhs1 (stmt
),
2420 gimple_assign_rhs2 (stmt
),
2428 /* If the definition is an AND or OR expression, we may be able to
2429 simplify by reassociating. */
2430 if (innercode
== TRUTH_AND_EXPR
2431 || innercode
== TRUTH_OR_EXPR
2432 || (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
2433 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
)))
2435 tree inner1
= gimple_assign_rhs1 (stmt
);
2436 tree inner2
= gimple_assign_rhs2 (stmt
);
2439 tree partial
= NULL_TREE
;
2440 bool is_or
= (innercode
== TRUTH_OR_EXPR
|| innercode
== BIT_IOR_EXPR
);
2442 /* Check for boolean identities that don't require recursive examination
2444 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2445 inner1 OR (inner1 AND inner2) => inner1
2446 !inner1 OR (inner1 OR inner2) => true
2447 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2449 if (inner1
== true_test_var
)
2450 return (is_or
? var
: inner1
);
2451 else if (inner2
== true_test_var
)
2452 return (is_or
? var
: inner2
);
2453 else if (inner1
== false_test_var
)
2456 : or_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
2457 else if (inner2
== false_test_var
)
2460 : or_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
2462 /* Next, redistribute/reassociate the OR across the inner tests.
2463 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2464 if (TREE_CODE (inner1
) == SSA_NAME
2465 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
2466 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2467 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
2468 gimple_assign_rhs1 (s
),
2469 gimple_assign_rhs2 (s
),
2470 code2
, op2a
, op2b
)))
2472 /* Handle the OR case, where we are reassociating:
2473 (inner1 OR inner2) OR (op2a code2 op2b)
2475 If the partial result t is a constant, we win. Otherwise
2476 continue on to try reassociating with the other inner test. */
2479 if (integer_onep (t
))
2480 return boolean_true_node
;
2481 else if (integer_zerop (t
))
2485 /* Handle the AND case, where we are redistributing:
2486 (inner1 AND inner2) OR (op2a code2 op2b)
2487 => (t AND (inner2 OR (op2a code op2b))) */
2488 else if (integer_zerop (t
))
2489 return boolean_false_node
;
2491 /* Save partial result for later. */
2495 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2496 if (TREE_CODE (inner2
) == SSA_NAME
2497 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
2498 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2499 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
2500 gimple_assign_rhs1 (s
),
2501 gimple_assign_rhs2 (s
),
2502 code2
, op2a
, op2b
)))
2504 /* Handle the OR case, where we are reassociating:
2505 (inner1 OR inner2) OR (op2a code2 op2b)
2507 => (t OR partial) */
2510 if (integer_zerop (t
))
2512 else if (integer_onep (t
))
2513 return boolean_true_node
;
2514 /* If both are the same, we can apply the identity
2516 else if (partial
&& same_bool_result_p (t
, partial
))
2520 /* Handle the AND case, where we are redistributing:
2521 (inner1 AND inner2) OR (op2a code2 op2b)
2522 => (t AND (inner1 OR (op2a code2 op2b)))
2523 => (t AND partial) */
2526 if (integer_zerop (t
))
2527 return boolean_false_node
;
2530 /* We already got a simplification for the other
2531 operand to the redistributed AND expression. The
2532 interesting case is when at least one is true.
2533 Or, if both are the same, we can apply the identity
2535 if (integer_onep (partial
))
2537 else if (integer_onep (t
))
2539 else if (same_bool_result_p (t
, partial
))
2548 /* Try to simplify the OR of two comparisons defined by
2549 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2550 If this can be done without constructing an intermediate value,
2551 return the resulting tree; otherwise NULL_TREE is returned.
2552 This function is deliberately asymmetric as it recurses on SSA_DEFs
2553 in the first comparison but not the second. */
2556 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
2557 enum tree_code code2
, tree op2a
, tree op2b
)
2559 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2560 if (operand_equal_p (op1a
, op2a
, 0)
2561 && operand_equal_p (op1b
, op2b
, 0))
2563 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2564 TRUTH_ORIF_EXPR
, code1
, code2
,
2565 boolean_type_node
, op1a
, op1b
);
2570 /* Likewise the swapped case of the above. */
2571 if (operand_equal_p (op1a
, op2b
, 0)
2572 && operand_equal_p (op1b
, op2a
, 0))
2574 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2575 TRUTH_ORIF_EXPR
, code1
,
2576 swap_tree_comparison (code2
),
2577 boolean_type_node
, op1a
, op1b
);
2582 /* If both comparisons are of the same value against constants, we might
2583 be able to merge them. */
2584 if (operand_equal_p (op1a
, op2a
, 0)
2585 && TREE_CODE (op1b
) == INTEGER_CST
2586 && TREE_CODE (op2b
) == INTEGER_CST
)
2588 int cmp
= tree_int_cst_compare (op1b
, op2b
);
2590 /* If we have (op1a != op1b), we should either be able to
2591 return that or TRUE, depending on whether the constant op1b
2592 also satisfies the other comparison against op2b. */
2593 if (code1
== NE_EXPR
)
2599 case EQ_EXPR
: val
= (cmp
== 0); break;
2600 case NE_EXPR
: val
= (cmp
!= 0); break;
2601 case LT_EXPR
: val
= (cmp
< 0); break;
2602 case GT_EXPR
: val
= (cmp
> 0); break;
2603 case LE_EXPR
: val
= (cmp
<= 0); break;
2604 case GE_EXPR
: val
= (cmp
>= 0); break;
2605 default: done
= false;
2610 return boolean_true_node
;
2612 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2615 /* Likewise if the second comparison is a != comparison. */
2616 else if (code2
== NE_EXPR
)
2622 case EQ_EXPR
: val
= (cmp
== 0); break;
2623 case NE_EXPR
: val
= (cmp
!= 0); break;
2624 case LT_EXPR
: val
= (cmp
> 0); break;
2625 case GT_EXPR
: val
= (cmp
< 0); break;
2626 case LE_EXPR
: val
= (cmp
>= 0); break;
2627 case GE_EXPR
: val
= (cmp
<= 0); break;
2628 default: done
= false;
2633 return boolean_true_node
;
2635 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2639 /* See if an equality test is redundant with the other comparison. */
2640 else if (code1
== EQ_EXPR
)
2645 case EQ_EXPR
: val
= (cmp
== 0); break;
2646 case NE_EXPR
: val
= (cmp
!= 0); break;
2647 case LT_EXPR
: val
= (cmp
< 0); break;
2648 case GT_EXPR
: val
= (cmp
> 0); break;
2649 case LE_EXPR
: val
= (cmp
<= 0); break;
2650 case GE_EXPR
: val
= (cmp
>= 0); break;
2655 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2657 else if (code2
== EQ_EXPR
)
2662 case EQ_EXPR
: val
= (cmp
== 0); break;
2663 case NE_EXPR
: val
= (cmp
!= 0); break;
2664 case LT_EXPR
: val
= (cmp
> 0); break;
2665 case GT_EXPR
: val
= (cmp
< 0); break;
2666 case LE_EXPR
: val
= (cmp
>= 0); break;
2667 case GE_EXPR
: val
= (cmp
<= 0); break;
2672 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2675 /* Chose the less restrictive of two < or <= comparisons. */
2676 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
2677 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
2679 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
2680 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2682 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2685 /* Likewise chose the less restrictive of two > or >= comparisons. */
2686 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
2687 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
2689 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
2690 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2692 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2695 /* Check for singleton ranges. */
2697 && ((code1
== LT_EXPR
&& code2
== GT_EXPR
)
2698 || (code1
== GT_EXPR
&& code2
== LT_EXPR
)))
2699 return fold_build2 (NE_EXPR
, boolean_type_node
, op1a
, op2b
);
2701 /* Check for less/greater pairs that don't restrict the range at all. */
2703 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
2704 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
2705 return boolean_true_node
;
2707 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
2708 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
2709 return boolean_true_node
;
2712 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2713 NAME's definition is a truth value. See if there are any simplifications
2714 that can be done against the NAME's definition. */
2715 if (TREE_CODE (op1a
) == SSA_NAME
2716 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
2717 && (integer_zerop (op1b
) || integer_onep (op1b
)))
2719 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
2720 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
2721 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
2722 switch (gimple_code (stmt
))
2725 /* Try to simplify by copy-propagating the definition. */
2726 return or_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
2729 /* If every argument to the PHI produces the same result when
2730 ORed with the second comparison, we win.
2731 Do not do this unless the type is bool since we need a bool
2732 result here anyway. */
2733 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
2735 tree result
= NULL_TREE
;
2737 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
2739 tree arg
= gimple_phi_arg_def (stmt
, i
);
2741 /* If this PHI has itself as an argument, ignore it.
2742 If all the other args produce the same result,
2744 if (arg
== gimple_phi_result (stmt
))
2746 else if (TREE_CODE (arg
) == INTEGER_CST
)
2748 if (invert
? integer_zerop (arg
) : integer_nonzerop (arg
))
2751 result
= boolean_true_node
;
2752 else if (!integer_onep (result
))
2756 result
= fold_build2 (code2
, boolean_type_node
,
2758 else if (!same_bool_comparison_p (result
,
2762 else if (TREE_CODE (arg
) == SSA_NAME
)
2764 tree temp
= or_var_with_comparison (arg
, invert
,
2770 else if (!same_bool_result_p (result
, temp
))
2786 /* Try to simplify the OR of two comparisons, specified by
2787 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2788 If this can be simplified to a single expression (without requiring
2789 introducing more SSA variables to hold intermediate values),
2790 return the resulting tree. Otherwise return NULL_TREE.
2791 If the result expression is non-null, it has boolean type. */
2794 maybe_fold_or_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
2795 enum tree_code code2
, tree op2a
, tree op2b
)
2797 tree t
= or_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
2801 return or_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);