/* Statement simplification on GIMPLE.
   Copyright (C) 2010 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "gimple-fold.h"
/* Return true when DECL can be referenced from the current unit.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to another compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
	In this case we have a static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node.
     3) COMDAT functions referred to by external vtables that
	we devirtualize only during the final compilation stage.
	At this time we have already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */
static bool
can_refer_decl_in_current_unit_p (tree decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;

  if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
    return true;
  /* External flag is set, so we deal with a C++ reference
     to a static object from another file.  */
  if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
      && TREE_CODE (decl) == VAR_DECL)
    {
      /* Just be sure it is not a bug in the frontend setting
	 flags incorrectly.  Those variables should never
	 be finalized.  */
      gcc_checking_assert (!(vnode = varpool_get_node (decl))
			   || !vnode->finalized);
      return false;
    }
  /* When the function is public, we can always introduce a new reference.
     The exception are COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at the ltrans stage, so don't worry about WHOPR.
     Also, while still gimplifying, all referred comdat functions will be
     produced.
     ??? As observed in PR20991 for already optimized out comdat virtual
     functions we may not necessarily give up, because the copy will be
     output elsewhere when the corresponding vtable is output.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;
  /* If we already output the function body, we are safe.  */
  if (TREE_ASM_WRITTEN (decl))
    return true;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have the function body and that we didn't take
	 the decision to eliminate the offline copy of the function yet.
	 The second is important when devirtualization happens during the
	 final compilation stage, when making a new reference no longer
	 makes the callee output.  */
      if (!node || !node->analyzed || node->global.inlined_to)
	return false;
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->finalized)
	return false;
    }
  return true;
}
/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
   it into an acceptable form for is_gimple_min_invariant.  */

static tree
canonicalize_constructor_val (tree cval)
{
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR)
    {
      tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval),
					     TREE_OPERAND (cval, 0),
					     TREE_OPERAND (cval, 1),
					     TREE_TYPE (cval));
      if (t)
	cval = t;
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (cval, 0));

      if (base
	  && (TREE_CODE (base) == VAR_DECL
	      || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base))
	return NULL_TREE;
      if (cfun && base && TREE_CODE (base) == VAR_DECL)
	add_referenced_var (base);
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
    }
  return cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (const_value_known_p (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  val = canonicalize_constructor_val (val);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */

bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (TREE_CODE (deref) == MEM_REF
	      && TREE_CODE (addr) == ADDR_EXPR);

  /* Don't propagate if ADDR's operand has incomplete type.  */
  if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
    return false;

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
	  || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
				      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
				     TREE_TYPE (addr))
	  && useless_type_conversion_p (TREE_TYPE (deref),
					TREE_TYPE (TREE_OPERAND (addr, 0))));
}
/* A subroutine of fold_stmt.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.

   LOC is the location of the original expression.  */
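/* Illustrative sketch (not from the original sources): for "int a[10]",
   with BASE the array and OFFSET the byte displacement 8, the access
   *(&a + 8) is rewritten as a[2], assuming 4-byte ints, because 8 divides
   the element size exactly; an offset of 6 would not fold since that
   division is not exact.  */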
static tree
maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of elements type) from that ARRAY_REF.
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = ssizetype;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
	 to sign-extend the (possibly pointer) offset here
	 and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
				 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
				   soffset.low, soffset.high,
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
	min_idx = TYPE_MIN_VALUE (idx_type);
      else
	min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds.  For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.
     This is only an issue for multi-dimensional arrays.  */
  if (TREE_CODE (elt_type) == ARRAY_TYPE
      && domain_type)
    {
      if (TYPE_MAX_VALUE (domain_type)
	  && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
	  && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
	return NULL_TREE;
      else if (TYPE_MIN_VALUE (domain_type)
	       && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
	       && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
	return NULL_TREE;
      else if (compare_tree_int (idx, 0) < 0)
	return NULL_TREE;
    }

  {
    tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
    SET_EXPR_LOCATION (t, loc);
    return t;
  }
}
/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
   LOC is the location of the original expression.

   Before attempting the conversion strip off existing ADDR_EXPRs.  */
static tree
maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
				tree orig_type)
{
  tree ret;

  STRIP_NOPS (base);
  if (TREE_CODE (base) != ADDR_EXPR)
    return NULL_TREE;

  base = TREE_OPERAND (base, 0);
  if (types_compatible_p (orig_type, TREE_TYPE (base))
      && integer_zerop (offset))
    return base;

  ret = maybe_fold_offset_to_array_ref (loc, base, offset);
  if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
    return ret;
  return NULL_TREE;
}
/* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
   LOC is the location of the original expression.  */
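/* Hedged example: for "int a[10]" and assuming 4-byte ints, the address
   (int *)&a + 8 can be expressed as &a[2]; the routine below delegates the
   index computation to maybe_fold_offset_to_array_ref and merely re-wraps
   the result in an ADDR_EXPR of the requested type.  */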
static tree
maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
			      tree orig_type)
{
  tree base, ret;

  STRIP_NOPS (addr);
  if (TREE_CODE (addr) != ADDR_EXPR)
    return NULL_TREE;
  base = TREE_OPERAND (addr, 0);
  ret = maybe_fold_offset_to_array_ref (loc, base, offset);
  if (ret)
    {
      ret = build_fold_addr_expr (ret);
      if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
	return NULL_TREE;
      SET_EXPR_LOCATION (ret, loc);
    }

  return ret;
}
/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	(T *)&array[x]
   which may be able to propagate further.  */
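/* For instance (illustration only, assuming "int array[4]" and 4-byte
   ints): a statement like "p_1 = &array + 8", whose result type T * is
   implicit in the POINTER_PLUS_EXPR, is turned into "p_1 = &array[2]" so
   that later passes can propagate the address further.  */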
static tree
maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    {
      /* Or op0 should now be A[0] and the non-constant offset defined
	 via a multiplication by the array element size.  */
      if (TREE_CODE (op0) == ARRAY_REF
	  /* As we will end up creating a variable index array access
	     in the outermost array dimension make sure there isn't
	     a more inner array that the index could overflow to.  */
	  && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
	  && integer_zerop (TREE_OPERAND (op0, 1))
	  && TREE_CODE (op1) == SSA_NAME)
	{
	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
	  tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
	  if (!host_integerp (elsz, 1)
	      || !is_gimple_assign (offset_def))
	    return NULL_TREE;

	  /* Do not build array references of something that we can't
	     see the true number of array dimensions for.  */
	  if (!DECL_P (TREE_OPERAND (op0, 0))
	      && !handled_component_p (TREE_OPERAND (op0, 0)))
	    return NULL_TREE;

	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (op0),
				   TREE_OPERAND (op0, 0),
				   gimple_assign_rhs1 (offset_def),
				   TREE_OPERAND (op0, 2),
				   TREE_OPERAND (op0, 3)));
	  else if (integer_onep (elsz)
		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (op0),
				   TREE_OPERAND (op0, 0),
				   op1,
				   TREE_OPERAND (op0, 2),
				   TREE_OPERAND (op0, 3)));
	}
      else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
	       && TREE_CODE (op1) == SSA_NAME)
	{
	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
	  tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
	  if (!host_integerp (elsz, 1)
	      || !is_gimple_assign (offset_def))
	    return NULL_TREE;

	  /* Do not build array references of something that we can't
	     see the true number of array dimensions for.  */
	  if (!DECL_P (op0)
	      && !handled_component_p (op0))
	    return NULL_TREE;

	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
				   op0, gimple_assign_rhs1 (offset_def),
				   integer_zero_node, NULL_TREE));
	  else if (integer_onep (elsz)
		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
				   op0, op1,
				   integer_zero_node, NULL_TREE));
	}

      return NULL_TREE;
    }

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
			     array_idx, op1);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (loc, op0, op1);
  if (t)
    {
      t = build_fold_addr_expr (t);
      if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
	return NULL_TREE;
      SET_EXPR_LOCATION (t, loc);
    }

  return t;
}
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */
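/* A few sketched examples of what this handles: REALPART_EXPR of a constant
   complex number folds to the constant's real part; a MEM_REF of the form
   MEM[(T *)&a, 0] is folded back to the plain reference "a" when the types
   are compatible; and TARGET_MEM_REFs are handed to maybe_fold_tmr.  */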
static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */
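/* Illustrative examples: for "x_1 = 2 + 3" the folded rhs 5 is returned;
   for a POINTER_PLUS_EXPR rhs the work is delegated to
   maybe_fold_stmt_addition; for a reference rhs maybe_fold_reference is
   used.  The caller substitutes the returned rhs into the statement.  */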
static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Try to fold a conditional expression.  */
	if (TREE_CODE (rhs) == COND_EXPR)
	  {
	    tree op0 = COND_EXPR_COND (rhs);
	    tree tem;
	    bool set = false;
	    location_t cond_loc = EXPR_LOCATION (rhs);

	    if (COMPARISON_CLASS_P (op0))
	      {
		fold_defer_overflow_warnings ();
		tem = fold_binary_loc (cond_loc,
				       TREE_CODE (op0), TREE_TYPE (op0),
				       TREE_OPERAND (op0, 0),
				       TREE_OPERAND (op0, 1));
		/* This is actually a conditional expression, not a GIMPLE
		   conditional statement, however, the valid_gimple_rhs_p
		   test still applies.  */
		set = (tem && is_gimple_condexpr (tem)
		       && valid_gimple_rhs_p (tem));
		fold_undefer_overflow_warnings (set, stmt, 0);
	      }
	    else if (is_gimple_min_invariant (op0))
	      {
		tem = op0;
		set = true;
	      }
	    else
	      return NULL_TREE;

	    if (set)
	      result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
					COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
	  }

	else if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return unshare_expr (get_symbol_constant_value (rhs));

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
	else if (CONVERT_EXPR_CODE_P (subcode)
		 && POINTER_TYPE_P (gimple_expr_type (stmt))
		 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
	  {
	    tree type = gimple_expr_type (stmt);
	    tree t = maybe_fold_offset_to_address (loc,
						   gimple_assign_rhs1 (stmt),
						   integer_zero_node, type);
	    if (t)
	      return t;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	{
	  tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
	    {
	      type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
	      if (!useless_type_conversion_p
		    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
		type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	    }
	  result = maybe_fold_stmt_addition (gimple_location (stmt),
					     type,
					     gimple_assign_rhs1 (stmt),
					     gimple_assign_rhs2 (stmt));
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;

	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
	     we lose canonicalization opportunities.  Do not go again
	     through fold here though, or the same non-GIMPLE will be
	     produced.  */
	  if (commutative_tree_code (subcode)
	      && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				       gimple_assign_rhs2 (stmt), false))
	    return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
			   gimple_assign_rhs2 (stmt),
			   gimple_assign_rhs1 (stmt));
	}
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;

	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
	     we lose canonicalization opportunities.  Do not go again
	     through fold here though, or the same non-GIMPLE will be
	     produced.  */
	  if (commutative_ternary_tree_code (subcode)
	      && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				       gimple_assign_rhs2 (stmt), false))
	    return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
			   gimple_assign_rhs2 (stmt),
			   gimple_assign_rhs1 (stmt),
			   gimple_assign_rhs3 (stmt));
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */
static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */
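/* Sketch of the intended use (hypothetical names): when a builtin call such
   as "memcpy (&dst, &src, 4)" is folded into the assignment "dst = src",
   this routine gimplifies that tree, inserts the resulting statements
   before *SI_P, and links their VUSE/VDEF chains so that the first VUSE and
   the last VDEF match those of the call being replaced.  */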
void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE;  /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc();
  struct gimplify_ctx gctx;
  gimple last = NULL;
  gimple laststore = NULL;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);
  reaching_vuse = gimple_vuse (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_remove (si_p, true);
	  return;
	}
    }
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      if (last)
	{
	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
	  gsi_next (si_p);
	}
      new_stmt = gsi_stmt (i);
      if (gimple_in_ssa_p (cfun))
	{
	  find_new_referenced_vars (new_stmt);
	  mark_symbols_for_renaming (new_stmt);
	}
      /* If the new statement has a VUSE, update it with exact SSA name we
	 know will reach this one.  */
      if (gimple_vuse (new_stmt))
	{
	  /* If we've also seen a previous store create a new VDEF for
	     the latter one, and make that the new reaching VUSE.  */
	  if (laststore)
	    {
	      reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
	      gimple_set_vdef (laststore, reaching_vuse);
	      update_stmt (laststore);
	      laststore = NULL;
	    }
	  gimple_set_vuse (new_stmt, reaching_vuse);
	  gimple_set_modified (new_stmt, true);
	}
      if (gimple_assign_single_p (new_stmt)
	  && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	laststore = new_stmt;
      last = new_stmt;
    }

  if (lhs == NULL_TREE)
    {
      /* If we replace a call without LHS that has a VDEF and our new
	 sequence ends with a store we must make that store have the same
	 vdef in order not to break the sequencing.  This can happen
	 for instance when folding memcpy calls into assignments.  */
      if (gimple_vdef (stmt) && laststore)
	{
	  gimple_set_vdef (laststore, gimple_vdef (stmt));
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
	  update_stmt (laststore);
	}
      else if (gimple_in_ssa_p (cfun))
	{
	  unlink_stmt_vdef (stmt);
	  release_defs (stmt);
	}

      new_stmt = last;
    }
  else
    {
      if (last)
	{
	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
	  gsi_next (si_p);
	}
      if (laststore && is_gimple_reg (lhs))
	{
	  gimple_set_vdef (laststore, gimple_vdef (stmt));
	  update_stmt (laststore);
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
	  laststore = NULL;
	}
      else if (laststore)
	{
	  reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
	  gimple_set_vdef (laststore, reaching_vuse);
	  update_stmt (laststore);
	  laststore = NULL;
	}

      new_stmt = gimple_build_assign (lhs, tmp);
      if (!is_gimple_reg (tmp))
	gimple_set_vuse (new_stmt, reaching_vuse);
      if (!is_gimple_reg (lhs))
	{
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
	}
      else if (reaching_vuse == gimple_vuse (stmt))
	unlink_stmt_vdef (stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
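/* Illustration (assumed example): for arg_1 = PHI <"foo"(bb2), "quux"(bb3)>
   and TYPE == 1 both PHI arguments have a constant strlen, so *LENGTH ends
   up as the maximum, 4; with TYPE == 0 the two different lengths make the
   function return false.  */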
static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
	return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
	       && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	       && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  /* If we already have a length we may want to keep the maximum.  */
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_maxval_strlen (rhs, length, visited, type);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }
	}
	return true;

      default:
	return false;
    }
}
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */
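/* Example of the kind of rewrite done here (a sketch): when
   get_maxval_strlen proves that the source of strcpy (dst, src) has length
   3, fold_builtin_strcpy may return a call to memcpy (dst, src, 4), which
   the caller then substitutes for the original statement.  */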
tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	    fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  If a call to the function must be accompanied
   with a this adjustment, the constant which should be added to the this
   pointer is stored to *DELTA.  If REFUSE_THUNKS is true, return NULL if the
   function is a thunk (other than a this adjustment which is dealt with by
   DELTA).  */
tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  tree *delta, bool refuse_thunks)
{
  HOST_WIDE_INT i;
  tree v, fndecl;
  struct cgraph_node *node;

  v = BINFO_VIRTUALS (known_binfo);
  /* If there are no virtual methods leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;
  i = 0;
  while (i != token)
    {
      i += (TARGET_VTABLE_USES_DESCRIPTORS
	    ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
      v = TREE_CHAIN (v);
    }

  /* If BV_VCALL_INDEX is non-NULL, give up.  */
  if (TREE_TYPE (v))
    return NULL_TREE;

  fndecl = TREE_VALUE (v);
  node = cgraph_get_node_or_alias (fndecl);
  if (refuse_thunks
      && (!node
    /* Bail out if it is a thunk declaration.  Since simple this_adjusting
       thunks are represented by a constant in TREE_PURPOSE of items in
       BINFO_VIRTUALS, this is a more complicated type which we cannot handle
       as easily.

       FIXME: Remove the following condition once we are able to represent
       thunk information on call graph edges.  */
	  || (node->same_body_alias && node->thunk.thunk_p)))
    return NULL_TREE;

  /* When the cgraph node is missing and the function is not public, we
     cannot devirtualize.  This can happen in WHOPR when the actual method
     ends up in another partition, because we found the devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (TREE_VALUE (v)))
    return NULL_TREE;

  *delta = TREE_PURPOSE (v);
  gcc_checking_assert (host_integerp (*delta, 0));
  return fndecl;
}
/* Generate code adjusting the first parameter of a call statement determined
   by GSI by DELTA.  */

void
gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
{
  gimple call_stmt = gsi_stmt (*gsi);
  tree parm, tmp;
  gimple new_stmt;

  delta = fold_convert (sizetype, delta);
  gcc_assert (gimple_call_num_args (call_stmt) >= 1);
  parm = gimple_call_arg (call_stmt, 0);
  gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
  tmp = create_tmp_var (TREE_TYPE (parm), NULL);
  add_referenced_var (tmp);

  tmp = make_ssa_name (tmp, NULL);
  new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
  SSA_NAME_DEF_STMT (tmp) = new_stmt;
  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
  gimple_call_set_arg (call_stmt, 0, tmp);
}
/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors).  */
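/* Hedged example: for "static struct A a;" where A is a class without base
   classes and CST is &a, the binfo returned is TYPE_BINFO (A), which the
   caller can use to turn an OBJ_TYPE_REF call on &a into a direct call.
   If A had base classes, NULL_TREE would be returned instead.  */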
tree
gimple_extract_devirt_binfo_from_cst (tree cst)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, expected_type, binfo;
  bool last_artificial = false;

  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  expected_type = TREE_TYPE (cst);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
	break;
      if (offset < 0)
	return NULL_TREE;

      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL)
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_low_cst (DECL_SIZE (fld), 1);
	  if (pos <= offset && (pos + size) > offset)
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      offset -= pos;
    }
  /* Artificial sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;
  return binfo;
}
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */
static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  callee = gimple_call_fndecl (stmt);
  if (!inplace && callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);
      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  return true;
	}
    }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      tree binfo, fndecl, delta, obj;
      HOST_WIDE_INT token;

      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  return true;
	}

      obj = OBJ_TYPE_REF_OBJECT (callee);
      binfo = gimple_extract_devirt_binfo_from_cst (obj);
      if (!binfo)
	return false;
      token = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
      fndecl = gimple_get_virt_method_for_binfo (token, binfo, &delta, false);
      if (!fndecl)
	return false;
      gcc_assert (integer_zerop (delta));
      gimple_call_set_fndecl (stmt, fndecl);
      return true;
    }

  return false;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */
static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	tree lhs = gimple_assign_lhs (stmt);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      /* Fold *& in call arguments.  */
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
	  {
	    tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	    if (tmp)
	      {
		gimple_call_set_arg (stmt, i, tmp);
		changed = true;
	      }
	  }
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	{
	  tree link = gimple_asm_output_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, true)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	{
	  tree link = gimple_asm_input_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, false)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */
bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}
/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement STMT should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */
bool
fold_stmt_inplace (gimple stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  bool changed = fold_stmt_1 (&gsi, true);
  gcc_assert (gsi_stmt (gsi) == stmt);
  return changed;
}
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */
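/* Sketched examples: canonicalize_bool (a_1 < b_2, false) returns the
   comparison itself with boolean type, canonicalize_bool (a_1 < b_2, true)
   returns a_1 >= b_2, and canonicalize_bool (x_3, true) for a boolean SSA
   name returns x_3 == 0.  */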
static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */
static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }

  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_EXPR if we can't simplify this to a single expression.  */
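/* Sketched example: for "var_1 = a_2 < b_3" and the query
   var_1 AND (a_2 >= b_3), the recursion below reduces the conjunction to
   boolean_false_node.  With INVERT set, DeMorgan's law first rewrites
   !var AND (op2a code2 op2b) as !(var OR !(op2a code2 op2b)).  */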
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (innercode == TRUTH_AND_EXPR
      || innercode == TRUTH_OR_EXPR
      || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
	  && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */
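/* Examples of results this can produce (illustrative): (x < 5) AND (x < 3)
   folds to x < 3; (x == 2) AND (x > 7) folds to false; and the singleton
   range (x <= 4) AND (x >= 4) folds to x == 4.  */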
2073 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
2074 enum tree_code code2
, tree op2a
, tree op2b
)
2076 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
2077 if (operand_equal_p (op1a
, op2a
, 0)
2078 && operand_equal_p (op1b
, op2b
, 0))
2080 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2081 TRUTH_ANDIF_EXPR
, code1
, code2
,
2082 boolean_type_node
, op1a
, op1b
);
2087 /* Likewise the swapped case of the above. */
2088 if (operand_equal_p (op1a
, op2b
, 0)
2089 && operand_equal_p (op1b
, op2a
, 0))
2091 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2092 TRUTH_ANDIF_EXPR
, code1
,
2093 swap_tree_comparison (code2
),
2094 boolean_type_node
, op1a
, op1b
);
2099 /* If both comparisons are of the same value against constants, we might
2100 be able to merge them. */
2101 if (operand_equal_p (op1a
, op2a
, 0)
2102 && TREE_CODE (op1b
) == INTEGER_CST
2103 && TREE_CODE (op2b
) == INTEGER_CST
)
2105 int cmp
= tree_int_cst_compare (op1b
, op2b
);
2107 /* If we have (op1a == op1b), we should either be able to
2108 return that or FALSE, depending on whether the constant op1b
2109 also satisfies the other comparison against op2b. */
2110 if (code1
== EQ_EXPR
)
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
2127 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2129 return boolean_false_node
;
2132 /* Likewise if the second comparison is an == comparison. */
2133 else if (code2
== EQ_EXPR
)
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
2150 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2152 return boolean_false_node
;
2156 /* Same business with inequality tests. */
2157 else if (code1
== NE_EXPR
)
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
2172 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2174 else if (code2
== NE_EXPR
)
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
2189 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
2212 /* Check for singleton ranges. */
2214 && ((code1
== LE_EXPR
&& code2
== GE_EXPR
)
2215 || (code1
== GE_EXPR
&& code2
== LE_EXPR
)))
2216 return fold_build2 (EQ_EXPR
, boolean_type_node
, op1a
, op2b
);
2218 /* Check for disjoint ranges. */
2220 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
2221 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
2222 return boolean_false_node
;
2224 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
2225 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
2226 return boolean_false_node
;
2229 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2230 NAME's definition is a truth value. See if there are any simplifications
2231 that can be done against the NAME's definition. */
2232 if (TREE_CODE (op1a
) == SSA_NAME
2233 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
2234 && (integer_zerop (op1b
) || integer_onep (op1b
)))
2236 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
2237 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
2238 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
2239 switch (gimple_code (stmt
))
2242 /* Try to simplify by copy-propagating the definition. */
2243 return and_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
2246 /* If every argument to the PHI produces the same result when
2247 ANDed with the second comparison, we win.
2248 Do not do this unless the type is bool since we need a bool
2249 result here anyway. */
2250 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
2252 tree result
= NULL_TREE
;
2254 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
2256 tree arg
= gimple_phi_arg_def (stmt
, i
);
2258 /* If this PHI has itself as an argument, ignore it.
2259 If all the other args produce the same result,
2261 if (arg
== gimple_phi_result (stmt
))
2263 else if (TREE_CODE (arg
) == INTEGER_CST
)
2265 if (invert
? integer_nonzerop (arg
) : integer_zerop (arg
))
2268 result
= boolean_false_node
;
2269 else if (!integer_zerop (result
))
2273 result
= fold_build2 (code2
, boolean_type_node
,
2275 else if (!same_bool_comparison_p (result
,
2279 else if (TREE_CODE (arg
) == SSA_NAME
)
2281 tree temp
= and_var_with_comparison (arg
, invert
,
2287 else if (!same_bool_result_p (result
, temp
))
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */
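
/* A minimal usage sketch (editor's illustration; x, three and seven are
   hypothetical trees, not part of the original sources):

     tree t = maybe_fold_and_comparisons (LT_EXPR, x, seven,
                                          LE_EXPR, x, three);

   Here t ends up being the tree for (x <= 3); when no simplification is
   possible t is NULL_TREE.  Unlike and_comparisons_1, this entry point also
   retries with the two comparisons swapped, so the SSA definition of either
   operand can be looked through.  */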
tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
/* Helper function for or_comparisons_1: try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */
2327 or_var_with_comparison (tree var
, bool invert
,
2328 enum tree_code code2
, tree op2a
, tree op2b
)
2331 gimple stmt
= SSA_NAME_DEF_STMT (var
);
2333 /* We can only deal with variables whose definitions are assignments. */
2334 if (!is_gimple_assign (stmt
))
2337 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2338 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2339 Then we only have to consider the simpler non-inverted cases. */
2341 t
= and_var_with_comparison_1 (stmt
,
2342 invert_tree_comparison (code2
, false),
2345 t
= or_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
2346 return canonicalize_bool (t
, invert
);
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */
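
/* Illustrative cases (editor's sketch, not part of the original sources),
   assuming VAR is a boolean SSA name:

     var OR (var != 0)   =>   var
     var OR (var == 0)   =>   boolean_true_node

   and, when VAR is defined as var = inner1 | inner2:

     var OR (inner1 != 0)   =>   var

   since inner1 being true already makes VAR true.  */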
2354 or_var_with_comparison_1 (gimple stmt
,
2355 enum tree_code code2
, tree op2a
, tree op2b
)
2357 tree var
= gimple_assign_lhs (stmt
);
2358 tree true_test_var
= NULL_TREE
;
2359 tree false_test_var
= NULL_TREE
;
2360 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
2362 /* Check for identities like (var OR (var != 0)) => true . */
2363 if (TREE_CODE (op2a
) == SSA_NAME
2364 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
2366 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
2367 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
2369 true_test_var
= op2a
;
2370 if (var
== true_test_var
)
2373 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
2374 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
2376 false_test_var
= op2a
;
2377 if (var
== false_test_var
)
2378 return boolean_true_node
;
2382 /* If the definition is a comparison, recurse on it. */
2383 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
2385 tree t
= or_comparisons_1 (innercode
,
2386 gimple_assign_rhs1 (stmt
),
2387 gimple_assign_rhs2 (stmt
),
2395 /* If the definition is an AND or OR expression, we may be able to
2396 simplify by reassociating. */
2397 if (innercode
== TRUTH_AND_EXPR
2398 || innercode
== TRUTH_OR_EXPR
2399 || (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
2400 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
)))
2402 tree inner1
= gimple_assign_rhs1 (stmt
);
2403 tree inner2
= gimple_assign_rhs2 (stmt
);
2406 tree partial
= NULL_TREE
;
2407 bool is_or
= (innercode
== TRUTH_OR_EXPR
|| innercode
== BIT_IOR_EXPR
);
      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));
2429 /* Next, redistribute/reassociate the OR across the inner tests.
2430 Compute the first partial result, (inner1 OR (op2a code op2b)) */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
2439 /* Handle the OR case, where we are reassociating:
2440 (inner1 OR inner2) OR (op2a code2 op2b)
2442 If the partial result t is a constant, we win. Otherwise
2443 continue on to try reassociating with the other inner test. */
2446 if (integer_onep (t
))
2447 return boolean_true_node
;
2448 else if (integer_zerop (t
))
2452 /* Handle the AND case, where we are redistributing:
2453 (inner1 AND inner2) OR (op2a code2 op2b)
2454 => (t AND (inner2 OR (op2a code op2b))) */
2455 else if (integer_zerop (t
))
2456 return boolean_false_node
;
2458 /* Save partial result for later. */
2462 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
2471 /* Handle the OR case, where we are reassociating:
2472 (inner1 OR inner2) OR (op2a code2 op2b)
2474 => (t OR partial) */
2477 if (integer_zerop (t
))
2479 else if (integer_onep (t
))
2480 return boolean_true_node
;
2481 /* If both are the same, we can apply the identity
2483 else if (partial
&& same_bool_result_p (t
, partial
))
2487 /* Handle the AND case, where we are redistributing:
2488 (inner1 AND inner2) OR (op2a code2 op2b)
2489 => (t AND (inner1 OR (op2a code2 op2b)))
2490 => (t AND partial) */
2493 if (integer_zerop (t
))
2494 return boolean_false_node
;
2497 /* We already got a simplification for the other
2498 operand to the redistributed AND expression. The
2499 interesting case is when at least one is true.
2500 Or, if both are the same, we can apply the identity
2502 if (integer_onep (partial
))
2504 else if (integer_onep (t
))
2506 else if (same_bool_result_p (t
, partial
))
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */
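
/* Illustration (editor's sketch, not part of the original sources): for two
   tests of the same operand against INTEGER_CST constants the code below
   picks the less restrictive test or proves the disjunction true, e.g.

     (x < 7)  OR (x <= 3)   =>   x < 7
     (x != 5) OR (x > 3)    =>   boolean_true_node
     (x < 4)  OR (x > 4)    =>   x != 4

   assuming x is the common operand and 3, 4, 5, 7 are INTEGER_CSTs.  */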
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
2526 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2527 if (operand_equal_p (op1a
, op2a
, 0)
2528 && operand_equal_p (op1b
, op2b
, 0))
2530 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2531 TRUTH_ORIF_EXPR
, code1
, code2
,
2532 boolean_type_node
, op1a
, op1b
);
2537 /* Likewise the swapped case of the above. */
2538 if (operand_equal_p (op1a
, op2b
, 0)
2539 && operand_equal_p (op1b
, op2a
, 0))
2541 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2542 TRUTH_ORIF_EXPR
, code1
,
2543 swap_tree_comparison (code2
),
2544 boolean_type_node
, op1a
, op1b
);
2549 /* If both comparisons are of the same value against constants, we might
2550 be able to merge them. */
2551 if (operand_equal_p (op1a
, op2a
, 0)
2552 && TREE_CODE (op1b
) == INTEGER_CST
2553 && TREE_CODE (op2b
) == INTEGER_CST
)
2555 int cmp
= tree_int_cst_compare (op1b
, op2b
);
2557 /* If we have (op1a != op1b), we should either be able to
2558 return that or TRUE, depending on whether the constant op1b
2559 also satisfies the other comparison against op2b. */
2560 if (code1
== NE_EXPR
)
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
2577 return boolean_true_node
;
2579 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2582 /* Likewise if the second comparison is a != comparison. */
2583 else if (code2
== NE_EXPR
)
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
2600 return boolean_true_node
;
2602 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2606 /* See if an equality test is redundant with the other comparison. */
2607 else if (code1
== EQ_EXPR
)
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
2622 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2624 else if (code2
== EQ_EXPR
)
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
2639 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}
      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}
2662 /* Check for singleton ranges. */
2664 && ((code1
== LT_EXPR
&& code2
== GT_EXPR
)
2665 || (code1
== GT_EXPR
&& code2
== LT_EXPR
)))
2666 return fold_build2 (NE_EXPR
, boolean_type_node
, op1a
, op2b
);
2668 /* Check for less/greater pairs that don't restrict the range at all. */
2670 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
2671 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
2672 return boolean_true_node
;
2674 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
2675 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
2676 return boolean_true_node
;
2679 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2680 NAME's definition is a truth value. See if there are any simplifications
2681 that can be done against the NAME's definition. */
2682 if (TREE_CODE (op1a
) == SSA_NAME
2683 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
2684 && (integer_zerop (op1b
) || integer_onep (op1b
)))
2686 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
2687 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
2688 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
2689 switch (gimple_code (stmt
))
2692 /* Try to simplify by copy-propagating the definition. */
2693 return or_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
2696 /* If every argument to the PHI produces the same result when
2697 ORed with the second comparison, we win.
2698 Do not do this unless the type is bool since we need a bool
2699 result here anyway. */
2700 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
2702 tree result
= NULL_TREE
;
2704 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
2706 tree arg
= gimple_phi_arg_def (stmt
, i
);
2708 /* If this PHI has itself as an argument, ignore it.
2709 If all the other args produce the same result,
2711 if (arg
== gimple_phi_result (stmt
))
2713 else if (TREE_CODE (arg
) == INTEGER_CST
)
2715 if (invert
? integer_zerop (arg
) : integer_nonzerop (arg
))
2718 result
= boolean_true_node
;
2719 else if (!integer_onep (result
))
2723 result
= fold_build2 (code2
, boolean_type_node
,
2725 else if (!same_bool_comparison_p (result
,
2729 else if (TREE_CODE (arg
) == SSA_NAME
)
2731 tree temp
= or_var_with_comparison (arg
, invert
,
2737 else if (!same_bool_result_p (result
, temp
))
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */
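
/* A minimal usage sketch (editor's illustration; x, three and seven are
   hypothetical trees, not part of the original sources):

     tree t = maybe_fold_or_comparisons (GT_EXPR, x, seven,
                                         GE_EXPR, x, three);

   Here t ends up being the tree for (x >= 3); when no simplification is
   possible t is NULL_TREE.  As with the AND variant, the two comparisons are
   also tried in swapped order so that the SSA definition of either one can
   be examined.  */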
tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant tree or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */
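
/* A sketch of a VALUEIZE callback (editor's illustration; the lattice lookup
   below is hypothetical and not part of the original sources):

     static tree
     my_valueize (tree name)
     {
       tree cst = my_lattice_lookup (name);
       return cst ? cst : name;
     }

     tree val = gimple_fold_stmt_to_constant_1 (stmt, my_valueize);

   The callback is given an SSA name and returns the value to substitute for
   it, typically a constant when one is known and the name itself otherwise;
   the folders below valueize every SSA operand before trying to fold.  */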
2782 gimple_fold_stmt_to_constant_1 (gimple stmt
, tree (*valueize
) (tree
))
2784 location_t loc
= gimple_location (stmt
);
2785 switch (gimple_code (stmt
))
2789 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
2791 switch (get_gimple_rhs_class (subcode
))
2793 case GIMPLE_SINGLE_RHS
:
2795 tree rhs
= gimple_assign_rhs1 (stmt
);
2796 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
2798 if (TREE_CODE (rhs
) == SSA_NAME
)
2800 /* If the RHS is an SSA_NAME, return its known constant value,
2802 return (*valueize
) (rhs
);
2804 /* Handle propagating invariant addresses into address
2806 else if (TREE_CODE (rhs
) == ADDR_EXPR
2807 && !is_gimple_min_invariant (rhs
))
2809 HOST_WIDE_INT offset
;
2811 base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs
, 0),
2815 && (CONSTANT_CLASS_P (base
)
2816 || decl_address_invariant_p (base
)))
2817 return build_invariant_address (TREE_TYPE (rhs
),
2820 else if (TREE_CODE (rhs
) == CONSTRUCTOR
2821 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
2822 && (CONSTRUCTOR_NELTS (rhs
)
2823 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
2829 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
2831 val
= (*valueize
) (val
);
2832 if (TREE_CODE (val
) == INTEGER_CST
2833 || TREE_CODE (val
) == REAL_CST
2834 || TREE_CODE (val
) == FIXED_CST
)
2835 list
= tree_cons (NULL_TREE
, val
, list
);
2840 return build_vector (TREE_TYPE (rhs
), nreverse (list
));
2843 if (kind
== tcc_reference
)
2845 if ((TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
2846 || TREE_CODE (rhs
) == REALPART_EXPR
2847 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
2848 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
2850 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
2851 return fold_unary_loc (EXPR_LOCATION (rhs
),
2853 TREE_TYPE (rhs
), val
);
2855 else if (TREE_CODE (rhs
) == BIT_FIELD_REF
2856 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
2858 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
2859 return fold_ternary_loc (EXPR_LOCATION (rhs
),
2861 TREE_TYPE (rhs
), val
,
2862 TREE_OPERAND (rhs
, 1),
2863 TREE_OPERAND (rhs
, 2));
2865 else if (TREE_CODE (rhs
) == MEM_REF
2866 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
2868 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
2869 if (TREE_CODE (val
) == ADDR_EXPR
2870 && is_gimple_min_invariant (val
))
2872 tree tem
= fold_build2 (MEM_REF
, TREE_TYPE (rhs
),
2874 TREE_OPERAND (rhs
, 1));
2879 return fold_const_aggregate_ref_1 (rhs
, valueize
);
2881 else if (kind
== tcc_declaration
)
2882 return get_symbol_constant_value (rhs
);
2886 case GIMPLE_UNARY_RHS
:
2888 /* Handle unary operators that can appear in GIMPLE form.
2889 Note that we know the single operand must be a constant,
2890 so this should almost always return a simplified RHS. */
2891 tree lhs
= gimple_assign_lhs (stmt
);
2892 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
2894 /* Conversions are useless for CCP purposes if they are
2895 value-preserving. Thus the restrictions that
2896 useless_type_conversion_p places for pointer type conversions
2897 do not apply here. Substitution later will only substitute to
2899 if (CONVERT_EXPR_CODE_P (subcode
)
2900 && POINTER_TYPE_P (TREE_TYPE (lhs
))
2901 && POINTER_TYPE_P (TREE_TYPE (op0
)))
2904 /* Try to re-construct array references on-the-fly. */
2905 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
2907 && ((tem
= maybe_fold_offset_to_address
2909 op0
, integer_zero_node
, TREE_TYPE (lhs
)))
2916 fold_unary_ignore_overflow_loc (loc
, subcode
,
2917 gimple_expr_type (stmt
), op0
);
2920 case GIMPLE_BINARY_RHS
:
2922 /* Handle binary operators that can appear in GIMPLE form. */
2923 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
2924 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
2926 /* Translate &x + CST into an invariant form suitable for
2927 further propagation. */
2928 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
2929 && TREE_CODE (op0
) == ADDR_EXPR
2930 && TREE_CODE (op1
) == INTEGER_CST
)
2932 tree off
= fold_convert (ptr_type_node
, op1
);
2933 return build_fold_addr_expr
2934 (fold_build2 (MEM_REF
,
2935 TREE_TYPE (TREE_TYPE (op0
)),
2936 unshare_expr (op0
), off
));
2939 return fold_binary_loc (loc
, subcode
,
2940 gimple_expr_type (stmt
), op0
, op1
);
2943 case GIMPLE_TERNARY_RHS
:
2945 /* Handle ternary operators that can appear in GIMPLE form. */
2946 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
2947 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
2948 tree op2
= (*valueize
) (gimple_assign_rhs3 (stmt
));
2950 return fold_ternary_loc (loc
, subcode
,
2951 gimple_expr_type (stmt
), op0
, op1
, op2
);
2963 if (gimple_call_internal_p (stmt
))
2964 /* No folding yet for these functions. */
2967 fn
= (*valueize
) (gimple_call_fn (stmt
));
2968 if (TREE_CODE (fn
) == ADDR_EXPR
2969 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
2970 && DECL_BUILT_IN (TREE_OPERAND (fn
, 0)))
2972 tree
*args
= XALLOCAVEC (tree
, gimple_call_num_args (stmt
));
2975 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
2976 args
[i
] = (*valueize
) (gimple_call_arg (stmt
, i
));
2977 call
= build_call_array_loc (loc
,
2978 gimple_call_return_type (stmt
),
2979 fn
, gimple_call_num_args (stmt
), args
);
2980 retval
= fold_call_expr (EXPR_LOCATION (call
), call
, false);
2982 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2983 STRIP_NOPS (retval
);
2994 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2995 Returns NULL_TREE if folding to a constant is not possible, otherwise
2996 returns a constant according to is_gimple_min_invariant. */
2999 gimple_fold_stmt_to_constant (gimple stmt
, tree (*valueize
) (tree
))
3001 tree res
= gimple_fold_stmt_to_constant_1 (stmt
, valueize
);
3002 if (res
&& is_gimple_min_invariant (res
))
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

static tree fold_ctor_reference (tree type, tree ctor,
				 unsigned HOST_WIDE_INT offset,
				 unsigned HOST_WIDE_INT size);
/* See if we can find a constructor defining the value of BASE.
   When we know the constructor with a constant offset (such as when
   BASE is array[40] and we do know the constructor of the array),
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when the constructor
   is not explicitly available, but it is known to be zero,
   such as 'static const int a;'.  */
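
/* Behaviour sketch (editor's illustration, hypothetical declarations):

     static const int a;
     static const int b[4] = { 1, 2, 3, 4 };

   For a reference based on 'a' (no DECL_INITIAL, known zero) the function
   returns error_mark_node, which callers fold to zero.  For a MEM_REF of 'b'
   with a constant offset of 8 bytes it returns b's CONSTRUCTOR and adds 64 to
   *BIT_OFFSET (assuming 8-bit units), leaving element selection to the
   fold_*_ctor_reference routines.  */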
3024 get_base_constructor (tree base
, HOST_WIDE_INT
*bit_offset
,
3025 tree (*valueize
)(tree
))
3027 HOST_WIDE_INT bit_offset2
, size
, max_size
;
3028 if (TREE_CODE (base
) == MEM_REF
)
3030 if (!integer_zerop (TREE_OPERAND (base
, 1)))
3032 if (!host_integerp (TREE_OPERAND (base
, 1), 0))
3034 *bit_offset
+= (mem_ref_offset (base
).low
3039 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
3040 base
= valueize (TREE_OPERAND (base
, 0));
3041 if (!base
|| TREE_CODE (base
) != ADDR_EXPR
)
3043 base
= TREE_OPERAND (base
, 0);
3046 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
3047 DECL_INITIAL. If BASE is a nested reference into another
3048 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
3049 the inner reference. */
3050 switch (TREE_CODE (base
))
3053 if (!const_value_known_p (base
))
3058 if (!DECL_INITIAL (base
)
3059 && (TREE_STATIC (base
) || DECL_EXTERNAL (base
)))
3060 return error_mark_node
;
3061 return DECL_INITIAL (base
);
3065 base
= get_ref_base_and_extent (base
, &bit_offset2
, &size
, &max_size
);
3066 if (max_size
== -1 || size
!= max_size
)
3068 *bit_offset
+= bit_offset2
;
3069 return get_base_constructor (base
, bit_offset
, valueize
);
/* CTOR is a STRING_CST.  Fold a reference of type TYPE and size SIZE
   to the memory at bit OFFSET.

   We only do the simple job of folding byte accesses.  */
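
/* Worked example (editor's illustration, hypothetical declaration):

     static const char a[20] = "hello";

   Reading one char (SIZE == BITS_PER_UNIT) at OFFSET 8 yields the
   INTEGER_CST 'e'; reading at OFFSET 80 is past the STRING_CST but still
   inside the zero-initialized tail of the array, so 0 is returned.  */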
3086 fold_string_cst_ctor_reference (tree type
, tree ctor
,
3087 unsigned HOST_WIDE_INT offset
,
3088 unsigned HOST_WIDE_INT size
)
3090 if (INTEGRAL_TYPE_P (type
)
3091 && (TYPE_MODE (type
)
3092 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
))))
3093 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
))))
3095 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
)))) == 1
3096 && size
== BITS_PER_UNIT
3097 && !(offset
% BITS_PER_UNIT
))
3099 offset
/= BITS_PER_UNIT
;
3100 if (offset
< (unsigned HOST_WIDE_INT
) TREE_STRING_LENGTH (ctor
))
3101 return build_int_cst_type (type
, (TREE_STRING_POINTER (ctor
)
3104 const char a[20]="hello";
	 might lead to an offset greater than the string length.  In this
	 case we know the value is either initialized to 0 or out of bounds;
	 return 0 in both cases.  */
      return build_zero_cst (type);
/* CTOR is a CONSTRUCTOR of an array type.  Fold a reference of type TYPE
   and size SIZE to the memory at bit OFFSET.  */
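
/* Worked example (editor's illustration, assuming 32-bit int and 8-bit
   units): for

     static const int a[4] = { 10, 20, 30, 40 };

   a read of type int with OFFSET 64 and SIZE 32 gives access_index
   (64 / 8) / 4 = 2 and inner_offset 0, so the CONSTRUCTOR element 30 is
   handed on to fold_ctor_reference.  */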
3119 fold_array_ctor_reference (tree type
, tree ctor
,
3120 unsigned HOST_WIDE_INT offset
,
3121 unsigned HOST_WIDE_INT size
)
3123 unsigned HOST_WIDE_INT cnt
;
3125 double_int low_bound
, elt_size
;
3126 double_int index
, max_index
;
3127 double_int access_index
;
3128 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
3129 HOST_WIDE_INT inner_offset
;
  /* Compute low bound and elt size.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = double_int_zero;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size =
    tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
  /* We can only handle constant-sized accesses that are known to be
     no larger than the size of the array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || double_int_cmp (elt_size,
			 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
  /* Compute the array index we look for.  */
  access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
				  elt_size, TRUNC_DIV_EXPR);
  access_index = double_int_add (access_index, low_bound);

  /* And offset within the access.  */
  inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
  /* See if the array element is large enough to span the whole access.  We do
     not care to fold accesses spanning multiple array elements.  */
  if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
  index = double_int_sub (low_bound, double_int_one);
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* An array constructor may explicitly set the index, specify a range,
	 or leave the index NULL, meaning that it is the next index after
	 the previous one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = tree_to_double_int (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = tree_to_double_int (TREE_OPERAND (cfield, 0));
	      max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	max_index = index = double_int_add (index, double_int_one);
      /* Do we have a match?  */
      if (double_int_cmp (access_index, index, 1) >= 0
	  && double_int_cmp (access_index, max_index, 1) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size);
  /* When memory is not explicitly mentioned in the constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
/* CTOR is a CONSTRUCTOR of an aggregate or vector.
   Fold a reference of type TYPE and size SIZE to the memory at bit OFFSET.  */
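
/* Worked example (editor's illustration, assuming 32-bit int, 16-bit short
   and 8-bit units): for

     struct s { int i; short j; };
     static const struct s x = { 1, 2 };

   a read of type short with OFFSET 32 and SIZE 16 falls inside field j's
   bit range [32, 48), so the initializer 2 is handed on to
   fold_ctor_reference with inner offset 0.  */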
3202 fold_nonarray_ctor_reference (tree type
, tree ctor
,
3203 unsigned HOST_WIDE_INT offset
,
3204 unsigned HOST_WIDE_INT size
)
3206 unsigned HOST_WIDE_INT cnt
;
3209 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
,
3212 tree byte_offset
= DECL_FIELD_OFFSET (cfield
);
3213 tree field_offset
= DECL_FIELD_BIT_OFFSET (cfield
);
3214 tree field_size
= DECL_SIZE (cfield
);
3215 double_int bitoffset
;
3216 double_int byte_offset_cst
= tree_to_double_int (byte_offset
);
3217 double_int bits_per_unit_cst
= uhwi_to_double_int (BITS_PER_UNIT
);
3218 double_int bitoffset_end
;
      /* Variable-sized objects in static constructors make no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
3228 /* Compute bit offset of the field. */
3229 bitoffset
= double_int_add (tree_to_double_int (field_offset
),
3230 double_int_mul (byte_offset_cst
,
3231 bits_per_unit_cst
));
3232 /* Compute bit offset where the field ends. */
3233 if (field_size
!= NULL_TREE
)
3234 bitoffset_end
= double_int_add (bitoffset
,
3235 tree_to_double_int (field_size
));
3237 bitoffset_end
= double_int_zero
;
3239 /* Is OFFSET in the range (BITOFFSET, BITOFFSET_END)? */
3240 if (double_int_cmp (uhwi_to_double_int (offset
), bitoffset
, 0) >= 0
3241 && (field_size
== NULL_TREE
3242 || double_int_cmp (uhwi_to_double_int (offset
),
3243 bitoffset_end
, 0) < 0))
3245 double_int access_end
= double_int_add (uhwi_to_double_int (offset
),
3246 uhwi_to_double_int (size
));
3247 double_int inner_offset
= double_int_sub (uhwi_to_double_int (offset
),
3249 /* We do have overlap. Now see if field is large enough to
3250 cover the access. Give up for accesses spanning multiple
3252 if (double_int_cmp (access_end
, bitoffset_end
, 0) > 0)
3254 return fold_ctor_reference (type
, cval
,
3255 double_int_to_uhwi (inner_offset
), size
);
  /* When memory is not explicitly mentioned in the constructor, it is 0.  */
  return build_zero_cst (type);
/* CTOR is a value initializing memory.  Fold a reference of type TYPE and
   size SIZE to the memory at bit OFFSET.  */
3266 fold_ctor_reference (tree type
, tree ctor
, unsigned HOST_WIDE_INT offset
,
3267 unsigned HOST_WIDE_INT size
)
3271 /* We found the field with exact match. */
3272 if (useless_type_conversion_p (type
, TREE_TYPE (ctor
))
3274 return canonicalize_constructor_val (ctor
);
3276 /* We are at the end of walk, see if we can view convert the
3278 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor
)) && !offset
3279 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3280 && operand_equal_p (TYPE_SIZE (type
),
3281 TYPE_SIZE (TREE_TYPE (ctor
)), 0))
3283 ret
= canonicalize_constructor_val (ctor
);
3284 ret
= fold_unary (VIEW_CONVERT_EXPR
, type
, ret
);
3289 if (TREE_CODE (ctor
) == STRING_CST
)
3290 return fold_string_cst_ctor_reference (type
, ctor
, offset
, size
);
3291 if (TREE_CODE (ctor
) == CONSTRUCTOR
)
3294 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
3295 return fold_array_ctor_reference (type
, ctor
, offset
, size
);
3297 return fold_nonarray_ctor_reference (type
, ctor
, offset
, size
);
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */
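
/* Usage sketch (editor's illustration, hypothetical declarations): given

     static const int a[4] = { 10, 20, 30, 40 };
     ... = a[i];

   where i is an SSA name that VALUEIZE maps to the constant 2, the
   ARRAY_REF case below computes a bit offset of 2 * 32 (assuming 32-bit
   int), finds a's CONSTRUCTOR via get_base_constructor, and folds the
   reference to 30 through fold_ctor_reference.  */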
3308 fold_const_aggregate_ref_1 (tree t
, tree (*valueize
) (tree
))
3310 tree ctor
, idx
, base
;
3311 HOST_WIDE_INT offset
, size
, max_size
;
3314 if (TREE_CODE_CLASS (TREE_CODE (t
)) == tcc_declaration
)
3315 return get_symbol_constant_value (t
);
3317 tem
= fold_read_from_constant_string (t
);
3321 switch (TREE_CODE (t
))
3324 case ARRAY_RANGE_REF
:
3325 /* Constant indexes are handled well by get_base_constructor.
3326 Only special case variable offsets.
3327 FIXME: This code can't handle nested references with variable indexes
3328 (they will be handled only by iteration of ccp). Perhaps we can bring
3329 get_ref_base_and_extent here and make it use a valueize callback. */
3330 if (TREE_CODE (TREE_OPERAND (t
, 1)) == SSA_NAME
3332 && (idx
= (*valueize
) (TREE_OPERAND (t
, 1)))
3333 && host_integerp (idx
, 0))
3335 tree low_bound
, unit_size
;
3337 /* If the resulting bit-offset is constant, track it. */
3338 if ((low_bound
= array_ref_low_bound (t
),
3339 host_integerp (low_bound
, 0))
3340 && (unit_size
= array_ref_element_size (t
),
3341 host_integerp (unit_size
, 1)))
3343 offset
= TREE_INT_CST_LOW (idx
);
3344 offset
-= TREE_INT_CST_LOW (low_bound
);
3345 offset
*= TREE_INT_CST_LOW (unit_size
);
3346 offset
*= BITS_PER_UNIT
;
3348 base
= TREE_OPERAND (t
, 0);
3349 ctor
= get_base_constructor (base
, &offset
, valueize
);
3350 /* Empty constructor. Always fold to 0. */
3351 if (ctor
== error_mark_node
)
3352 return build_zero_cst (TREE_TYPE (t
));
	      /* Out of bound array access.  Value is undefined, but don't fold.  */
	  /* We cannot determine the ctor.  */
3360 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
,
3361 TREE_INT_CST_LOW (unit_size
)
3369 case TARGET_MEM_REF
:
3371 base
= get_ref_base_and_extent (t
, &offset
, &size
, &max_size
);
3372 ctor
= get_base_constructor (base
, &offset
, valueize
);
3374 /* Empty constructor. Always fold to 0. */
3375 if (ctor
== error_mark_node
)
3376 return build_zero_cst (TREE_TYPE (t
));
      /* We do not know the precise address.  */
3378 if (max_size
== -1 || max_size
!= size
)
      /* We cannot determine the ctor.  */
3384 /* Out of bound array access. Value is undefined, but don't fold. */
3388 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
, size
);
3393 tree c
= fold_const_aggregate_ref_1 (TREE_OPERAND (t
, 0), valueize
);
3394 if (c
&& TREE_CODE (c
) == COMPLEX_CST
)
3395 return fold_build1_loc (EXPR_LOCATION (t
),
3396 TREE_CODE (t
), TREE_TYPE (t
), c
);
3408 fold_const_aggregate_ref (tree t
)
3410 return fold_const_aggregate_ref_1 (t
, NULL
);