/* Statement simplification on GIMPLE.
   Copyright (C) 2010, 2011 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "target.h"
#include "gimple-fold.h"
/* Return true when DECL can be referenced from current unit.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */
static bool
can_refer_decl_in_current_unit_p (tree decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;

  if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
    return true;
  /* External flag is set, so we deal with C++ reference
     to static object from other file.  */
  if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
      && TREE_CODE (decl) == VAR_DECL)
    {
      /* Just be sure the frontend has not set the flags
	 incorrectly.  Those variables should never
	 be finalized in the current unit.  */
      gcc_checking_assert (!(vnode = varpool_get_node (decl))
			   || !vnode->finalized);
      return false;
    }
  /* When function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at the ltrans stage, so don't worry about WHOPR.
     Also, while we are still gimplifying, all referred comdat functions
     will be produced.
     ??? As observed in PR20991 for already optimized out comdat virtual
     functions we may not necessarily give up, because the copy will be
     output elsewhere when the corresponding vtable is output.  */
  if (!flag_ltrans
      && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;
  /* If we already output the function body, we are safe.  */
  if (TREE_ASM_WRITTEN (decl))
    return true;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have function body and that we didn't take
	 the decision to eliminate the offline copy of the function yet.
	 The second is important when devirtualization happens during final
	 compilation stage when making a new reference no longer forces the
	 callee to be compiled.  */
      if (!node || !node->analyzed || node->global.inlined_to)
	return false;
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->finalized)
	return false;
    }
  return true;
}
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.  */

tree
canonicalize_constructor_val (tree cval)
{
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR)
    {
      tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval),
					     TREE_OPERAND (cval, 0),
					     TREE_OPERAND (cval, 1),
					     TREE_TYPE (cval));
      if (t)
	cval = t;
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (cval, 0));

      if (base
	  && (TREE_CODE (base) == VAR_DECL
	      || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base))
	return NULL_TREE;
      if (cfun && base && TREE_CODE (base) == VAR_DECL)
	add_referenced_var (base);
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
    }
  return cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (const_value_known_p (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  val = canonicalize_constructor_val (val);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */

bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (TREE_CODE (deref) == MEM_REF
	      && TREE_CODE (addr) == ADDR_EXPR);

  /* Don't propagate if ADDR's operand has incomplete type.  */
  if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
    return false;

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
	  || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
				      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
				     TREE_TYPE (addr))
	  && useless_type_conversion_p (TREE_TYPE (deref),
					TREE_TYPE (TREE_OPERAND (addr, 0))));
}
/* A subroutine of fold_stmt.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.

   LOC is the location of the original expression.  */

static tree
maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of elements type) from that ARRAY_REF).
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = ssizetype;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (!elt_size)
    return NULL;
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
	 to sign-extend the (possibly pointer) offset here
	 and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
				 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
				   soffset.low, soffset.high,
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
	min_idx = TYPE_MIN_VALUE (idx_type);
      else
	min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds. For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.
     This is only an issue for multi-dimensional arrays.  */
  if (TREE_CODE (elt_type) == ARRAY_TYPE
      && domain_type)
    {
      if (TYPE_MAX_VALUE (domain_type)
	  && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
	  && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
	return NULL_TREE;
      else if (TYPE_MIN_VALUE (domain_type)
	       && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
	       && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
	return NULL_TREE;
      else if (compare_tree_int (idx, 0) < 0)
	return NULL_TREE;
    }

  {
    tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
    SET_EXPR_LOCATION (t, loc);
    return t;
  }
}
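/* As an illustration (a sketch, assuming a target where sizeof (int) == 4):
   for

     int a[10];
     ... = *(&a[1] + 8);

   the byte offset 8 divides exactly by the element size 4, giving an index
   of 2 which is added to the existing index 1, so the dereference folds to
   the ARRAY_REF a[3].  An offset of, say, 6 would leave a nonzero remainder
   and the function would decline to fold.  */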
/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
   LOC is the location of original expression.

   Before attempting the conversion strip off existing ADDR_EXPRs.  */

tree
maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
				tree orig_type)
{
  tree ret;

  STRIP_NOPS (base);
  if (TREE_CODE (base) != ADDR_EXPR)
    return NULL_TREE;

  base = TREE_OPERAND (base, 0);
  if (types_compatible_p (orig_type, TREE_TYPE (base))
      && integer_zerop (offset))
    return base;

  ret = maybe_fold_offset_to_array_ref (loc, base, offset);
  if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
    return ret;
  return NULL_TREE;
}

/* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
   LOC is the location of the original expression.  */

tree
maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
			      tree orig_type)
{
  tree base, ret;

  STRIP_NOPS (addr);
  if (TREE_CODE (addr) != ADDR_EXPR)
    return NULL_TREE;
  base = TREE_OPERAND (addr, 0);
  ret = maybe_fold_offset_to_array_ref (loc, base, offset);
  if (ret)
    {
      ret = build_fold_addr_expr (ret);
      if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
	return NULL_TREE;
      SET_EXPR_LOCATION (ret, loc);
    }

  return ret;
}
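/* As an illustration (a sketch, again assuming 4-byte int elements): for

     int a[10];
     int *p = (int *) (&a p+ 4);

   the ADDR_EXPR is stripped, the byte offset 4 becomes the index 1 via
   maybe_fold_offset_to_array_ref, and the result is rebuilt as &a[1],
   provided the conversion back to the original pointer type is useless.  */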
/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */

tree
maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    {
      /* Or op0 should now be A[0] and the non-constant offset defined
	 via a multiplication by the array element size.  */
      if (TREE_CODE (op0) == ARRAY_REF
	  /* As we will end up creating a variable index array access
	     in the outermost array dimension make sure there isn't
	     a more inner array that the index could overflow to.  */
	  && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
	  && integer_zerop (TREE_OPERAND (op0, 1))
	  && TREE_CODE (op1) == SSA_NAME)
	{
	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
	  tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
	  if (!host_integerp (elsz, 1)
	      || !is_gimple_assign (offset_def))
	    return NULL_TREE;

	  /* Do not build array references of something that we can't
	     see the true number of array dimensions for.  */
	  if (!DECL_P (TREE_OPERAND (op0, 0))
	      && !handled_component_p (TREE_OPERAND (op0, 0)))
	    return NULL_TREE;

	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (op0),
				   TREE_OPERAND (op0, 0),
				   gimple_assign_rhs1 (offset_def),
				   TREE_OPERAND (op0, 2),
				   TREE_OPERAND (op0, 3)));
	  else if (integer_onep (elsz)
		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (op0),
				   TREE_OPERAND (op0, 0),
				   op1,
				   TREE_OPERAND (op0, 2),
				   TREE_OPERAND (op0, 3)));
	}
      else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
	       /* Ditto.  */
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
	       && TREE_CODE (op1) == SSA_NAME)
	{
	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
	  tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
	  if (!host_integerp (elsz, 1)
	      || !is_gimple_assign (offset_def))
	    return NULL_TREE;

	  /* Do not build array references of something that we can't
	     see the true number of array dimensions for.  */
	  if (!DECL_P (op0)
	      && !handled_component_p (op0))
	    return NULL_TREE;

	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
				   op0, gimple_assign_rhs1 (offset_def),
				   integer_zero_node, NULL_TREE));
	  else if (integer_onep (elsz)
		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
				   op0, op1,
				   integer_zero_node, NULL_TREE));
	}

      return NULL_TREE;
    }

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
			     array_idx, op1);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (loc, op0, op1);
  if (t)
    {
      t = build_fold_addr_expr (t);
      if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
	return NULL_TREE;
      SET_EXPR_LOCATION (t, loc);
    }

  return t;
}
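/* As an illustration of the variable-index case (a sketch with hypothetical
   SSA names, assuming 4-byte ints):

     int a[10];
     off_3 = i_2 * 4;
     p_4 = &a[0] p+ off_3;

   the multiplication by the element size recorded in off_3's defining
   statement lets the POINTER_PLUS_EXPR be rewritten as p_4 = &a[i_2].  */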
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}
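/* As an illustration (a sketch; the exact trees depend on the IL):
   propagation can produce the reference MEM[&x + 0], which the MEM_REF
   folding above turns back into the plain tree x, and a constant read
   such as "abc"[1] can be folded to the character 'b' through
   fold_const_aggregate_ref when the reference is not an lvalue.  */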
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Try to fold a conditional expression.  */
	if (TREE_CODE (rhs) == COND_EXPR)
	  {
	    tree op0 = COND_EXPR_COND (rhs);
	    tree tem;
	    bool set = false;
	    location_t cond_loc = EXPR_LOCATION (rhs);

	    if (COMPARISON_CLASS_P (op0))
	      {
		fold_defer_overflow_warnings ();
		tem = fold_binary_loc (cond_loc,
				       TREE_CODE (op0), TREE_TYPE (op0),
				       TREE_OPERAND (op0, 0),
				       TREE_OPERAND (op0, 1));
		/* This is actually a conditional expression, not a GIMPLE
		   conditional statement, however, the valid_gimple_rhs_p
		   test still applies.  */
		set = (tem && is_gimple_condexpr (tem)
		       && valid_gimple_rhs_p (tem));
		fold_undefer_overflow_warnings (set, stmt, 0);
	      }
	    else if (is_gimple_min_invariant (op0))
	      {
		tem = op0;
		set = true;
	      }
	    else
	      return NULL_TREE;

	    if (set)
	      result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
					COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
	  }

	else if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return unshare_expr (get_symbol_constant_value (rhs));

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
	else if (CONVERT_EXPR_CODE_P (subcode)
		 && POINTER_TYPE_P (gimple_expr_type (stmt))
		 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
	  {
	    tree type = gimple_expr_type (stmt);
	    tree t = maybe_fold_offset_to_address (loc,
						   gimple_assign_rhs1 (stmt),
						   integer_zero_node, type);
	    if (t)
	      return t;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	{
	  tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
	    {
	      type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
	      if (!useless_type_conversion_p
		    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
		type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	    }
	  result = maybe_fold_stmt_addition (gimple_location (stmt),
					     type,
					     gimple_assign_rhs1 (stmt),
					     gimple_assign_rhs2 (stmt));
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;

	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
	     we lose canonicalization opportunities.  Do not go again
	     through fold here though, or the same non-GIMPLE will be
	     produced.  */
	  if (commutative_tree_code (subcode)
	      && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				       gimple_assign_rhs2 (stmt), false))
	    return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
			   gimple_assign_rhs2 (stmt),
			   gimple_assign_rhs1 (stmt));
	}
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;

	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
	     we lose canonicalization opportunities.  Do not go again
	     through fold here though, or the same non-GIMPLE will be
	     produced.  */
	  if (commutative_ternary_tree_code (subcode)
	      && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				       gimple_assign_rhs2 (stmt), false))
	    return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
			   gimple_assign_rhs2 (stmt),
			   gimple_assign_rhs1 (stmt),
			   gimple_assign_rhs3 (stmt));
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Attempt to fold a conditional statement. Return true if any changes were
   made. We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
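/* As an illustration (a sketch; x_1 is a hypothetical integral SSA name):
   a condition such as

     if (x_1 != x_1)

   folds via fold_binary_loc to boolean_false_node, and
   gimple_cond_set_condition_from_tree rewrites the GIMPLE_COND into its
   canonical constant-false form; the cfg itself is left untouched.  */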
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE; /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;
  gimple last = NULL;
  gimple laststore = NULL;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);
  reaching_vuse = gimple_vuse (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_remove (si_p, true);
	  return;
	}
    }
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      if (last)
	{
	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
	  gsi_next (si_p);
	}
      new_stmt = gsi_stmt (i);
      if (gimple_in_ssa_p (cfun))
	{
	  find_new_referenced_vars (new_stmt);
	  mark_symbols_for_renaming (new_stmt);
	}
      /* If the new statement has a VUSE, update it with exact SSA name we
	 know will reach this one.  */
      if (gimple_vuse (new_stmt))
	{
	  /* If we've also seen a previous store create a new VDEF for
	     the latter one, and make that the new reaching VUSE.  */
	  if (laststore)
	    {
	      reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
	      gimple_set_vdef (laststore, reaching_vuse);
	      update_stmt (laststore);
	      laststore = NULL;
	    }
	  gimple_set_vuse (new_stmt, reaching_vuse);
	  gimple_set_modified (new_stmt, true);
	}
      if (gimple_assign_single_p (new_stmt)
	  && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	{
	  laststore = new_stmt;
	}
      last = new_stmt;
    }

  if (lhs == NULL_TREE)
    {
      /* If we replace a call without LHS that has a VDEF and our new
	 sequence ends with a store we must make that store have the same
	 vdef in order not to break the sequencing.  This can happen
	 for instance when folding memcpy calls into assignments.  */
      if (gimple_vdef (stmt) && laststore)
	{
	  gimple_set_vdef (laststore, gimple_vdef (stmt));
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
	  update_stmt (laststore);
	}
      else if (gimple_in_ssa_p (cfun))
	{
	  unlink_stmt_vdef (stmt);
	  release_defs (stmt);
	}
      new_stmt = last;
    }
  else
    {
      if (last)
	{
	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
	  gsi_next (si_p);
	}
      if (laststore && is_gimple_reg (lhs))
	{
	  gimple_set_vdef (laststore, gimple_vdef (stmt));
	  update_stmt (laststore);
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
	  laststore = NULL;
	}
      else if (laststore)
	{
	  reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
	  gimple_set_vdef (laststore, reaching_vuse);
	  update_stmt (laststore);
	  laststore = NULL;
	}
      new_stmt = gimple_build_assign (lhs, tmp);
      if (!is_gimple_reg (tmp))
	gimple_set_vuse (new_stmt, reaching_vuse);
      if (!is_gimple_reg (lhs))
	{
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
	}
      else if (reaching_vuse == gimple_vuse (stmt))
	unlink_stmt_vdef (stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
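/* As an illustration (a sketch): when a call such as

     memcpy (&a, &b, 8);

   has been folded into the tree "a = b", gimplifying that tree yields a
   store; the code above gives that store the VDEF of the replaced call so
   that the virtual use-def chain stays intact for later passes.  */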
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
	return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
	       && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	       && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_maxval_strlen (rhs, length, visited, type);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }
	}
	return true;

      default:
	return false;
    }
}
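/* As an illustration (a sketch with a hypothetical SSA name): for

     s_3 = PHI <"foo"(2), "ba"(3)>

   a TYPE == 1 query walks both PHI arguments and returns the maximum
   string length 3, while a TYPE == 0 query fails because the exact
   lengths 3 and 2 disagree.  */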
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	      fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
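/* As an illustration (a sketch with a hypothetical SSA name): for

     strcpy (dst_1, "abc");

   the length of the source is known to be 3, so fold_builtin_strcpy can
   rewrite the call to memcpy (dst_1, "abc", 4), copying the terminating
   NUL as well.  */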
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  If a call to the function must be accompanied
   with a this adjustment, the constant which should be added to this pointer
   is stored to *DELTA.  If REFUSE_THUNKS is true, return NULL if the function
   is a thunk (other than a this adjustment which is dealt with by DELTA). */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  tree *delta)
{
  HOST_WIDE_INT i;
  tree v, fndecl;

  v = BINFO_VIRTUALS (known_binfo);
  /* If there are no virtual methods leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;
  i = 0;
  while (i != token)
    {
      i += (TARGET_VTABLE_USES_DESCRIPTORS
	    ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
      v = TREE_CHAIN (v);
    }

  /* If BV_VCALL_INDEX is non-NULL, give up.  */
  if (TREE_TYPE (v))
    return NULL_TREE;

  fndecl = TREE_VALUE (v);

  /* When cgraph node is missing and function is not public, we cannot
     devirtualize.  This can happen in WHOPR when the actual method
     ends up in other partition, because we found devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (TREE_VALUE (v)))
    return NULL_TREE;

  *delta = TREE_PURPOSE (v);
  gcc_checking_assert (host_integerp (*delta, 0));
  return fndecl;
}
/* Generate code adjusting the first parameter of a call statement determined
   by GSI by DELTA.  */

void
gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
{
  gimple call_stmt = gsi_stmt (*gsi);
  tree parm, tmp;
  gimple new_stmt;

  delta = fold_convert (sizetype, delta);
  gcc_assert (gimple_call_num_args (call_stmt) >= 1);
  parm = gimple_call_arg (call_stmt, 0);
  gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
  tmp = create_tmp_var (TREE_TYPE (parm), NULL);
  add_referenced_var (tmp);

  tmp = make_ssa_name (tmp, NULL);
  new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
  SSA_NAME_DEF_STMT (tmp) = new_stmt;
  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
  gimple_call_set_arg (call_stmt, 0, tmp);
}
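/* As an illustration (a sketch with hypothetical SSA names): for a
   devirtualized method requiring a this adjustment of 8, the code above
   emits

     tmp_5 = this_1 p+ 8;

   before the call and replaces the first argument with tmp_5.  */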
/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors).  */

tree
gimple_extract_devirt_binfo_from_cst (tree cst)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, expected_type, binfo;
  bool last_artificial = false;

  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  expected_type = TREE_TYPE (cst);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
	break;
      if (offset < 0)
	return NULL_TREE;

      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL)
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_low_cst (DECL_SIZE (fld), 1);
	  if (pos <= offset && (pos + size) > offset)
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      offset -= pos;
    }
  /* Artificial sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;

  return binfo;
}
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value. Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  callee = gimple_call_fndecl (stmt);
  if (!inplace && callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);

      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  return true;
	}
    }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      tree binfo, fndecl, delta, obj;
      HOST_WIDE_INT token;

      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  return true;
	}

      obj = OBJ_TYPE_REF_OBJECT (callee);
      binfo = gimple_extract_devirt_binfo_from_cst (obj);
      if (!binfo)
	return false;
      token = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
      fndecl = gimple_get_virt_method_for_binfo (token, binfo, &delta);
      if (!fndecl)
	return false;
      gcc_assert (integer_zerop (delta));
      gimple_call_set_fndecl (stmt, fndecl);
      return true;
    }

  return false;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;
  gimple_stmt_iterator gsinext = *gsi;
  gimple next_stmt;

  gsi_next (&gsinext);
  next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	tree lhs = gimple_assign_lhs (stmt);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      /* Fold *& in call arguments.  */
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
	  {
	    tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	    if (tmp)
	      {
		gimple_call_set_arg (stmt, i, tmp);
		changed = true;
	      }
	  }
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	{
	  tree link = gimple_asm_output_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, true)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	{
	  tree link = gimple_asm_input_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, false)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  /* If stmt folds into nothing and it was the last stmt in a bb,
     don't call gsi_stmt.  */
  if (gsi_end_p (*gsi))
    {
      gcc_assert (next_stmt == NULL);
      return changed;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  Don't do this if stmt folded into nothing,
     as we'd be changing the next stmt.  */
  if (gimple_has_lhs (stmt) && stmt != next_stmt)
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}

/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement STMT should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  bool changed = fold_stmt_1 (&gsi, true);
  gcc_assert (gsi_stmt (gsi) == stmt);
  return changed;
}
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
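/* As an illustration (a sketch with hypothetical SSA names):
   canonicalize_bool (x_1, true) for a non-boolean SSA name x_1 yields the
   boolean-typed comparison x_1 == 0, while canonicalize_bool (a_2 < b_3,
   true) for an integer comparison yields a_2 >= b_3.  */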
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
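/* As an illustration (a sketch with hypothetical SSA names): for an
   inverted variable, the DeMorgan rewrite above turns the query

     !var_1 AND (a_2 < b_3)

   into !(var_1 OR (a_2 >= b_3)), so only the non-inverted helper needs to
   understand var_1's defining statement; canonicalize_bool then re-applies
   the inversion to whatever simplification comes back.  */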
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (innercode == TRUTH_AND_EXPR
      || innercode == TRUTH_OR_EXPR
      || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
	  && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
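
/* A few worked cases for the constant-merging rules above (editor's
   sketch; x stands for any integral value):

     x == 5 && x >  3   =>  x == 5    (cmp > 0 satisfies the GT test)
     x == 5 && x <  3   =>  false     (the constant fails the LT test)
     x <  3 && x <  5   =>  x < 3     (more restrictive upper bound)
     x <= 4 && x >= 4   =>  x == 4    (singleton range, cmp == 0)
     x <  3 && x >  5   =>  false     (disjoint ranges)  */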
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
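
/* Usage sketch (editor's illustration; cst5 and cst7 are hypothetical
   INTEGER_CSTs for 5 and 7):

     t = maybe_fold_and_comparisons (LT_EXPR, x, cst5, GT_EXPR, x, cst7);

   returns boolean_false_node, since x < 5 && x > 7 describes a
   disjoint range.  Trying both operand orders here is what makes the
   deliberately asymmetric recursion in and_comparisons_1 acceptable.  */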
/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
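
/* Editor's sketch of the DeMorgan rewrite above (names hypothetical):
   for !v OR (x < y) we instead compute v AND (x >= y), since
   invert_tree_comparison (LT_EXPR, false) == GE_EXPR, and then let
   canonicalize_bool negate the folded result; only the non-inverted
   AND logic ever has to handle the case.  */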
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (innercode == TRUTH_AND_EXPR
      || innercode == TRUTH_OR_EXPR
      || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
	  && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
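
/* Editor's sketch of the identity check above (SSA names hypothetical),
   with boolean operands:

     v_1 = a_2 | b_3;
     t_4 = a_2 != 0;
     r_5 = v_1 | t_4;

   Here true_test_var == a_2 == inner1, so v_1 | (a_2 != 0) folds to
   v_1 by inner1 OR (inner1 OR inner2) => var.  */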
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
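
/* Worked cases for the OR merging rules above (editor's sketch):

     x != 5 || x <  7   =>  true      (the excluded value 5 satisfies LT)
     x <  5 || x <  3   =>  x < 5     (less restrictive upper bound)
     x <  4 || x >  4   =>  x != 4    (singleton gap, cmp == 0)
     x <  5 || x >  3   =>  true      (the two tests cover everything)  */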
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
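
/* Usage sketch (editor's illustration), symmetrical to
   maybe_fold_and_comparisons; with hypothetical INTEGER_CSTs cst5 and
   cst3 for 5 and 3,

     t = maybe_fold_or_comparisons (LE_EXPR, x, cst5, GE_EXPR, x, cst3);

   returns boolean_true_node, since x <= 5 || x >= 3 does not restrict
   x at all.  */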
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, list;

		  list = NULL_TREE;
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			list = tree_cons (NULL_TREE, val, list);
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), nreverse (list));
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for pointer type conversions
		 do not apply here.  Substitution later will only substitute to
		 allowed places.  */
	      if (CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0)))
		{
		  tree tem;
		  /* Try to re-construct array references on-the-fly.  */
		  if (!useless_type_conversion_p (TREE_TYPE (lhs),
						  TREE_TYPE (op0))
		      && ((tem = maybe_fold_offset_to_address
			   (loc,
			    op0, integer_zero_node, TREE_TYPE (lhs)))
			  != NULL_TREE))
		    return tem;
		  return op0;
		}

	      return
		fold_unary_ignore_overflow_loc (loc, subcode,
						gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
		 further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr
			   (fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;

	if (gimple_call_internal_p (stmt))
	  /* No folding yet for these functions.  */
	  return NULL_TREE;

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    call = build_call_array_loc (loc,
					 gimple_call_return_type (stmt),
					 fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
	    if (retval)
	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (retval);
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
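
/* Usage sketch (editor's illustration; the lattice lookup is
   hypothetical): a propagating pass folds a statement through its
   known SSA values by supplying a callback, roughly

     static tree
     my_valueize (tree name)
     {
       tree val = my_lattice_lookup (name);
       return val ? val : name;
     }

     ... gimple_fold_stmt_to_constant_1 (stmt, my_valueize) ...

   The callback must return either a constant replacement for NAME or
   NAME itself; tree-ssa-ccp.c drives this entry point that way.  */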
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

static tree fold_ctor_reference (tree type, tree ctor,
				 unsigned HOST_WIDE_INT offset,
				 unsigned HOST_WIDE_INT size);
/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!host_integerp (TREE_OPERAND (base, 1), 0))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).low
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
      if (!const_value_known_p (base))
	return NULL_TREE;

      /* Fallthru.  */
    case CONST_DECL:
      if (!DECL_INITIAL (base)
	  && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
	return error_mark_node;
      return DECL_INITIAL (base);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
/* CTOR is STRING_CST.  Fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.

   We do only a simple job of folding byte accesses.  */

static tree
fold_string_cst_ctor_reference (tree type, tree ctor,
				unsigned HOST_WIDE_INT offset,
				unsigned HOST_WIDE_INT size)
{
  if (INTEGRAL_TYPE_P (type)
      && (TYPE_MODE (type)
	  == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	  == MODE_INT)
      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
      && size == BITS_PER_UNIT
      && !(offset % BITS_PER_UNIT))
    {
      offset /= BITS_PER_UNIT;
      if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
	return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
					  [offset]));
      /* Folding
	 const char a[20]="hello";
	 return a[10];

	 might lead to offset greater than string length.  In this case we
	 know value is either initialized to 0 or out of bounds.  Return 0
	 in both cases.  */
      return build_zero_cst (type);
    }
  return NULL_TREE;
}
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  double_int low_bound, elt_size;
  double_int index, max_index;
  double_int access_index;
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = double_int_zero;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size =
    tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || double_int_cmp (elt_size,
			 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
				  elt_size, TRUNC_DIV_EXPR);
  access_index = double_int_add (access_index, low_bound);

  /* And offset within the access.  */
  inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
    return NULL_TREE;

  index = double_int_sub (low_bound, double_int_one);
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify range,
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = tree_to_double_int (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = tree_to_double_int (TREE_OPERAND (cfield, 0));
	      max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	max_index = index = double_int_add (index, double_int_one);

      /* Do we have a match?  */
      if (double_int_cmp (access_index, index, 1) >= 0
	  && double_int_cmp (access_index, max_index, 1) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
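
/* Worked example for the index arithmetic above (editor's sketch,
   assuming 8-bit units and 32-bit int): reading 32 bits at bit
   OFFSET 96 from 'static const int a[10]' with 4-byte elements gives

     access_index = (96 / BITS_PER_UNIT) / 4 + low_bound = 3
     inner_offset = 96 % (4 * BITS_PER_UNIT) = 0

   so the fold recurses into the constructor element for a[3] at
   offset 0.  */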
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      double_int bitoffset;
      double_int byte_offset_cst = tree_to_double_int (byte_offset);
      double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
      double_int bitoffset_end;

      /* Variable sized objects in static constructors make no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = double_int_add (tree_to_double_int (field_offset),
				  double_int_mul (byte_offset_cst,
						  bits_per_unit_cst));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = double_int_add (bitoffset,
					tree_to_double_int (field_size));
      else
	bitoffset_end = double_int_zero;

      /* Is OFFSET in the range (BITOFFSET, BITOFFSET_END)?  */
      if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) >= 0
	  && (field_size == NULL_TREE
	      || double_int_cmp (uhwi_to_double_int (offset),
				 bitoffset_end, 0) < 0))
	{
	  double_int access_end = double_int_add (uhwi_to_double_int (offset),
						  uhwi_to_double_int (size));
	  double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
						    bitoffset);
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      double_int_to_uhwi (inner_offset), size);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
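
/* Editor's sketch: for

     static const struct { int x; int y; } p = { 1, 2 };

   a 32-bit read at bit offset 32 overlaps field y (bitoffset 32,
   bitoffset_end 64) and access_end 64 <= bitoffset_end, so we recurse
   into the initializer of y with inner_offset 0 and fold to 2,
   assuming 32-bit int and no padding.  */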
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  */

static tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (ctor);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && operand_equal_p (TYPE_SIZE (type),
			  TYPE_SIZE (TREE_TYPE (ctor)), 0))
    {
      ret = canonicalize_constructor_val (ctor);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_NOPS (ret);
      return ret;
    }
  if (TREE_CODE (ctor) == STRING_CST)
    return fold_string_cst_ctor_reference (type, ctor, offset, size);
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size);
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && host_integerp (idx, 0))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       host_integerp (low_bound, 0))
	      && (unit_size = array_ref_element_size (t),
		  host_integerp (unit_size, 1)))
	    {
	      offset = TREE_INT_CST_LOW (idx);
	      offset -= TREE_INT_CST_LOW (low_bound);
	      offset *= TREE_INT_CST_LOW (unit_size);
	      offset *= BITS_PER_UNIT;

	      base = TREE_OPERAND (t, 0);
	      ctor = get_base_constructor (base, &offset, valueize);
	      /* Empty constructor.  Always fold to 0.  */
	      if (ctor == error_mark_node)
		return build_zero_cst (TREE_TYPE (t));
	      /* Out of bound array access.  Value is undefined,
		 but don't fold.  */
	      if (offset < 0)
		return NULL_TREE;
	      /* We cannot determine ctor.  */
	      if (!ctor)
		return NULL_TREE;
	      return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					  TREE_INT_CST_LOW (unit_size)
					  * BITS_PER_UNIT);
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_const_aggregate_ref_1, but without valueizing SSA names.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
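
/* Usage sketch (editor's illustration): given

     static const int a[2] = { 1, 2 };

   fold_const_aggregate_ref on the tree for a[1] walks into the
   initializer and yields the INTEGER_CST 2; with an SSA index i_3 a
   caller would instead use fold_const_aggregate_ref_1 together with a
   valueize callback that knows i_3's value.  */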
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
	 cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
	{
	case ABS_EXPR:
	  /* Always true for floating-point operands.  */
	  return true;

	case MULT_EXPR:
	  /* True if the two operands are identical (since we are
	     restricted to floating-point inputs).  */
	  op0 = gimple_assign_rhs1 (def_stmt);
	  op1 = gimple_assign_rhs2 (def_stmt);

	  if (op0 == op1
	      || operand_equal_p (op0, op1, 0))
	    return true;

	default:
	  break;
	}
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  tree arg1;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
		 nonnegative inputs.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the second argument is an even integer.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == INTEGER_CST
		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the second argument is an even integer-valued
		 real.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (arg1);
		  n = real_to_integer (&c);

		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}

	      break;

	    default:
	      break;
	    }
	}
    }

  return false;
}