1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2013 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
32 #include "gimple-iterator.h"
33 #include "gimple-ssa.h"
34 #include "tree-ssanames.h"
35 #include "tree-into-ssa.h"
38 #include "tree-ssa-propagate.h"
40 #include "ipa-utils.h"
41 #include "gimple-pretty-print.h"
42 #include "tree-ssa-address.h"
43 #include "langhooks.h"
45 /* Return true when DECL can be referenced from current unit.
46 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
47 We can get declarations that are not possible to reference for various
50 1) When analyzing C++ virtual tables.
51 C++ virtual tables do have known constructors even
52 when they are keyed to other compilation unit.
53 Those tables can contain pointers to methods and vars
54 in other units. Those methods have both STATIC and EXTERNAL
56 2) In WHOPR mode devirtualization might lead to reference
57 to method that was partitioned elsewhere.
58 In this case we have static VAR_DECL or FUNCTION_DECL
59 that has no corresponding callgraph/varpool node
61 3) COMDAT functions referred by external vtables that
62 we devirtualize only during final compilation stage.
63 At this time we already decided that we will not output
64 the function body and thus we can't reference the symbol
/* NOTE(review): this text is damaged by extraction -- original source lines
   (braces, return statements, parts of conditions) are missing and single
   statements are split across lines.  Code tokens below are left untouched;
   only comments were added or repaired.  Compare against upstream
   gimple-fold.c before trusting control flow read from this copy.  */
68 can_refer_decl_in_current_unit_p (tree decl
, tree from_decl
)
70 struct varpool_node
*vnode
;
71 struct cgraph_node
*node
;
/* Abstract decls (e.g. abstract function origins) are never output;
   bail out early for them.  */
74 if (DECL_ABSTRACT (decl
))
77 /* We are concerned only about static/external vars and functions. */
78 if ((!TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
))
79 || (TREE_CODE (decl
) != VAR_DECL
&& TREE_CODE (decl
) != FUNCTION_DECL
))
82 /* Static objects can be referred only if they was not optimized out yet. */
83 if (!TREE_PUBLIC (decl
) && !DECL_EXTERNAL (decl
))
/* NOTE(review): `snode' is used here but its declaration line is missing
   from this copy -- presumably a `symtab_node' lost in extraction.  */
85 snode
= symtab_get_node (decl
);
88 node
= dyn_cast
<cgraph_node
> (snode
);
/* A function whose body was inlined everywhere has no offline copy
   left to refer to.  */
89 return !node
|| !node
->global
.inlined_to
;
92 /* We will later output the initializer, so we can refer to it.
93 So we are concerned only when DECL comes from initializer of
96 || TREE_CODE (from_decl
) != VAR_DECL
97 || !DECL_EXTERNAL (from_decl
)
99 && symtab_get_node (from_decl
)->in_other_partition
))
101 /* We are folding reference from external vtable. The vtable may refer
102 to a symbol keyed to other compilation unit. The other compilation
103 unit may be in separate DSO and the symbol may be hidden. */
104 if (DECL_VISIBILITY_SPECIFIED (decl
)
105 && DECL_EXTERNAL (decl
)
106 && (!(snode
= symtab_get_node (decl
)) || !snode
->in_other_partition
))
108 /* When function is public, we always can introduce new reference.
109 Exception are the COMDAT functions where introducing a direct
110 reference imply need to include function body in the current unit. */
111 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
113 /* We are not at ltrans stage; so don't worry about WHOPR.
114 Also when still gimplifying all referred comdat functions will be
117 As observed in PR20991 for already optimized out comdat virtual functions
118 it may be tempting to not necessarily give up because the copy will be
119 output elsewhere when corresponding vtable is output.
120 This is however not possible - ABI specify that COMDATs are output in
121 units where they are used and when the other unit was compiled with LTO
122 it is possible that vtable was kept public while the function itself
124 if (!flag_ltrans
&& (!DECL_COMDAT (decl
) || !cgraph_function_flags_ready
))
127 /* OK we are seeing either COMDAT or static variable. In this case we must
128 check that the definition is still around so we can refer it. */
129 if (TREE_CODE (decl
) == FUNCTION_DECL
)
131 node
= cgraph_get_node (decl
);
132 /* Check that we still have function body and that we didn't take
133 the decision to eliminate offline copy of the function yet.
134 The second is important when devirtualization happens during final
135 compilation stage when making a new reference no longer makes callee
137 if (!node
|| !node
->definition
|| node
->global
.inlined_to
)
139 gcc_checking_assert (!TREE_ASM_WRITTEN (decl
));
143 else if (TREE_CODE (decl
) == VAR_DECL
)
145 vnode
= varpool_get_node (decl
);
146 if (!vnode
|| !vnode
->definition
)
148 gcc_checking_assert (!TREE_ASM_WRITTEN (decl
));
155 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
156 acceptable form for is_gimple_min_invariant.
157 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
/* NOTE(review): text damaged by extraction -- braces/returns are missing and
   statements are split across lines.  Only comments were added below.  */
160 canonicalize_constructor_val (tree cval
, tree from_decl
)
162 tree orig_cval
= cval
;
/* Rewrite PTR p+ CST into &MEM[ptr + CST] so the result is an
   ADDR_EXPR, which is acceptable to is_gimple_min_invariant.  */
164 if (TREE_CODE (cval
) == POINTER_PLUS_EXPR
165 && TREE_CODE (TREE_OPERAND (cval
, 1)) == INTEGER_CST
)
167 tree ptr
= TREE_OPERAND (cval
, 0);
168 if (is_gimple_min_invariant (ptr
))
169 cval
= build1_loc (EXPR_LOCATION (cval
),
170 ADDR_EXPR
, TREE_TYPE (ptr
),
171 fold_build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (ptr
)),
173 fold_convert (ptr_type_node
,
174 TREE_OPERAND (cval
, 1))));
176 if (TREE_CODE (cval
) == ADDR_EXPR
)
178 tree base
= NULL_TREE
;
/* Replace a compound literal by its underlying anonymous decl.  */
179 if (TREE_CODE (TREE_OPERAND (cval
, 0)) == COMPOUND_LITERAL_EXPR
)
181 base
= COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval
, 0));
183 TREE_OPERAND (cval
, 0) = base
;
186 base
= get_base_address (TREE_OPERAND (cval
, 0));
/* Taking the address of a decl we cannot refer to from this unit
   would create a bogus reference; see
   can_refer_decl_in_current_unit_p above.  */
190 if ((TREE_CODE (base
) == VAR_DECL
191 || TREE_CODE (base
) == FUNCTION_DECL
)
192 && !can_refer_decl_in_current_unit_p (base
, from_decl
))
/* Mark the variable addressable since we are creating a new
   address of it.  */
194 if (TREE_CODE (base
) == VAR_DECL
)
195 TREE_ADDRESSABLE (base
) = 1;
196 else if (TREE_CODE (base
) == FUNCTION_DECL
)
198 /* Make sure we create a cgraph node for functions we'll reference.
199 They can be non-existent if the reference comes from an entry
200 of an external vtable for example. */
201 cgraph_get_create_node (base
);
203 /* Fixup types in global initializers. */
204 if (TREE_TYPE (TREE_TYPE (cval
)) != TREE_TYPE (TREE_OPERAND (cval
, 0)))
205 cval
= build_fold_addr_expr (TREE_OPERAND (cval
, 0));
/* Preserve the type the caller saw: convert back to the original
   type if the rewrite changed it in a meaningful way.  */
207 if (!useless_type_conversion_p (TREE_TYPE (orig_cval
), TREE_TYPE (cval
)))
208 cval
= fold_convert (TREE_TYPE (orig_cval
), cval
);
/* Constants with TREE_OVERFLOW set are not valid GIMPLE invariants;
   strip the overflow flag.  */
211 if (TREE_OVERFLOW_P (cval
))
212 return drop_tree_overflow (cval
);
216 /* If SYM is a constant variable with known value, return the value.
217 NULL_TREE is returned otherwise. */
/* NOTE(review): text damaged by extraction -- braces and some return
   statements are missing.  Only comments were added below.  */
220 get_symbol_constant_value (tree sym
)
222 tree val
= ctor_for_folding (sym
);
/* error_mark_node means "the initializer may not be used"; anything
   else (including NULL for a zero initializer) is fair game.  */
223 if (val
!= error_mark_node
)
227 val
= canonicalize_constructor_val (unshare_expr (val
), sym
);
228 if (val
&& is_gimple_min_invariant (val
))
233 /* Variables declared 'const' without an initializer
234 have zero as the initializer if they may not be
235 overridden at link or run time. */
237 && (INTEGRAL_TYPE_P (TREE_TYPE (sym
))
238 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym
))))
239 return build_zero_cst (TREE_TYPE (sym
));
247 /* Subroutine of fold_stmt. We perform several simplifications of the
248 memory reference tree EXPR and make sure to re-gimplify them properly
249 after propagation of constant addresses. IS_LHS is true if the
250 reference is supposed to be an lvalue. */
/* NOTE(review): text damaged by extraction -- the declaration of `t'
   (presumably `tree *t = &expr;') and of `tem'/`result' plus several
   braces/returns are missing from this copy.  Only comments were added.  */
253 maybe_fold_reference (tree expr
, bool is_lhs
)
/* Unary wrappers around constants fold directly.  */
258 if ((TREE_CODE (expr
) == VIEW_CONVERT_EXPR
259 || TREE_CODE (expr
) == REALPART_EXPR
260 || TREE_CODE (expr
) == IMAGPART_EXPR
)
261 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
262 return fold_unary_loc (EXPR_LOCATION (expr
),
265 TREE_OPERAND (expr
, 0));
266 else if (TREE_CODE (expr
) == BIT_FIELD_REF
267 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
268 return fold_ternary_loc (EXPR_LOCATION (expr
),
271 TREE_OPERAND (expr
, 0),
272 TREE_OPERAND (expr
, 1),
273 TREE_OPERAND (expr
, 2));
/* Walk down component refs to the innermost base object.  */
275 while (handled_component_p (*t
))
276 t
= &TREE_OPERAND (*t
, 0);
278 /* Canonicalize MEM_REFs invariant address operand. Do this first
279 to avoid feeding non-canonical MEM_REFs elsewhere. */
280 if (TREE_CODE (*t
) == MEM_REF
281 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t
, 0)))
283 bool volatile_p
= TREE_THIS_VOLATILE (*t
);
284 tree tem
= fold_binary (MEM_REF
, TREE_TYPE (*t
),
285 TREE_OPERAND (*t
, 0),
286 TREE_OPERAND (*t
, 1));
/* fold_binary does not carry the volatile flag; restore it.  */
289 TREE_THIS_VOLATILE (tem
) = volatile_p
;
/* Recurse: further simplifications may now be possible.  */
291 tem
= maybe_fold_reference (expr
, is_lhs
);
299 && (result
= fold_const_aggregate_ref (expr
))
300 && is_gimple_min_invariant (result
))
303 /* Fold back MEM_REFs to reference trees. */
304 if (TREE_CODE (*t
) == MEM_REF
305 && TREE_CODE (TREE_OPERAND (*t
, 0)) == ADDR_EXPR
306 && integer_zerop (TREE_OPERAND (*t
, 1))
307 && (TREE_THIS_VOLATILE (*t
)
308 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0)))
309 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t
, 1)))
310 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t
))
311 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t
, 1)))))
312 /* We have to look out here to not drop a required conversion
313 from the rhs to the lhs if is_lhs, but we don't have the
314 rhs here to verify that. Thus require strict type
316 && types_compatible_p (TREE_TYPE (*t
),
317 TREE_TYPE (TREE_OPERAND
318 (TREE_OPERAND (*t
, 0), 0))))
321 *t
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
322 tem
= maybe_fold_reference (expr
, is_lhs
);
327 else if (TREE_CODE (*t
) == TARGET_MEM_REF
)
329 tree tem
= maybe_fold_tmr (*t
);
333 tem
= maybe_fold_reference (expr
, is_lhs
);
344 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
345 replacement rhs for the statement or NULL_TREE if no simplification
346 could be made. It is assumed that the operands have been previously
/* NOTE(review): text damaged by extraction -- braces, `return' statements
   and several declarations (e.g. `tem', `set', `i', `val' in the vector
   CONSTRUCTOR walk) are missing from this copy.  Only comments added.  */
350 fold_gimple_assign (gimple_stmt_iterator
*si
)
352 gimple stmt
= gsi_stmt (*si
);
353 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
354 location_t loc
= gimple_location (stmt
);
356 tree result
= NULL_TREE
;
/* Dispatch on the shape of the RHS: single operand, unary, binary,
   or ternary operation.  */
358 switch (get_gimple_rhs_class (subcode
))
360 case GIMPLE_SINGLE_RHS
:
362 tree rhs
= gimple_assign_rhs1 (stmt
);
364 if (REFERENCE_CLASS_P (rhs
))
365 return maybe_fold_reference (rhs
, false);
367 else if (TREE_CODE (rhs
) == ADDR_EXPR
)
/* Try to simplify the reference the address is taken of;
   &MEM[p, 0] folds back to p itself.  */
369 tree ref
= TREE_OPERAND (rhs
, 0);
370 tree tem
= maybe_fold_reference (ref
, true);
372 && TREE_CODE (tem
) == MEM_REF
373 && integer_zerop (TREE_OPERAND (tem
, 1)))
374 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (tem
, 0));
376 result
= fold_convert (TREE_TYPE (rhs
),
377 build_fold_addr_expr_loc (loc
, tem
));
378 else if (TREE_CODE (ref
) == MEM_REF
379 && integer_zerop (TREE_OPERAND (ref
, 1)))
380 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (ref
, 0));
383 else if (TREE_CODE (rhs
) == CONSTRUCTOR
384 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
385 && (CONSTRUCTOR_NELTS (rhs
)
386 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
388 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
392 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
393 if (TREE_CODE (val
) != INTEGER_CST
394 && TREE_CODE (val
) != REAL_CST
395 && TREE_CODE (val
) != FIXED_CST
)
398 return build_vector_from_ctor (TREE_TYPE (rhs
),
399 CONSTRUCTOR_ELTS (rhs
));
402 else if (DECL_P (rhs
))
403 return get_symbol_constant_value (rhs
);
405 /* If we couldn't fold the RHS, hand over to the generic
407 if (result
== NULL_TREE
)
410 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
411 that may have been added by fold, and "useless" type
412 conversions that might now be apparent due to propagation. */
413 STRIP_USELESS_TYPE_CONVERSION (result
);
415 if (result
!= rhs
&& valid_gimple_rhs_p (result
))
422 case GIMPLE_UNARY_RHS
:
424 tree rhs
= gimple_assign_rhs1 (stmt
);
426 result
= fold_unary_loc (loc
, subcode
, gimple_expr_type (stmt
), rhs
);
429 /* If the operation was a conversion do _not_ mark a
430 resulting constant with TREE_OVERFLOW if the original
431 constant was not. These conversions have implementation
432 defined behavior and retaining the TREE_OVERFLOW flag
433 here would confuse later passes such as VRP. */
434 if (CONVERT_EXPR_CODE_P (subcode
)
435 && TREE_CODE (result
) == INTEGER_CST
436 && TREE_CODE (rhs
) == INTEGER_CST
)
437 TREE_OVERFLOW (result
) = TREE_OVERFLOW (rhs
);
439 STRIP_USELESS_TYPE_CONVERSION (result
);
440 if (valid_gimple_rhs_p (result
))
446 case GIMPLE_BINARY_RHS
:
447 /* Try to canonicalize for boolean-typed X the comparisons
448 X == 0, X == 1, X != 0, and X != 1. */
449 if (gimple_assign_rhs_code (stmt
) == EQ_EXPR
450 || gimple_assign_rhs_code (stmt
) == NE_EXPR
)
452 tree lhs
= gimple_assign_lhs (stmt
);
453 tree op1
= gimple_assign_rhs1 (stmt
);
454 tree op2
= gimple_assign_rhs2 (stmt
);
455 tree type
= TREE_TYPE (op1
);
457 /* Check whether the comparison operands are of the same boolean
458 type as the result type is.
459 Check that second operand is an integer-constant with value
461 if (TREE_CODE (op2
) == INTEGER_CST
462 && (integer_zerop (op2
) || integer_onep (op2
))
463 && useless_type_conversion_p (TREE_TYPE (lhs
), type
))
465 enum tree_code cmp_code
= gimple_assign_rhs_code (stmt
);
466 bool is_logical_not
= false;
468 /* X == 0 and X != 1 is a logical-not of X
469 X == 1 and X != 0 is X */
470 if ((cmp_code
== EQ_EXPR
&& integer_zerop (op2
))
471 || (cmp_code
== NE_EXPR
&& integer_onep (op2
)))
472 is_logical_not
= true;
474 if (is_logical_not
== false)
476 /* Only for one-bit precision typed X the transformation
477 !X -> ~X is valid. */
478 else if (TYPE_PRECISION (type
) == 1)
479 result
= build1_loc (gimple_location (stmt
), BIT_NOT_EXPR
,
481 /* Otherwise we use !X -> X ^ 1. */
483 result
= build2_loc (gimple_location (stmt
), BIT_XOR_EXPR
,
484 type
, op1
, build_int_cst (type
, 1));
/* No boolean canonicalization applied; fall back to generic
   binary folding.  */
490 result
= fold_binary_loc (loc
, subcode
,
491 TREE_TYPE (gimple_assign_lhs (stmt
)),
492 gimple_assign_rhs1 (stmt
),
493 gimple_assign_rhs2 (stmt
));
497 STRIP_USELESS_TYPE_CONVERSION (result
);
498 if (valid_gimple_rhs_p (result
))
503 case GIMPLE_TERNARY_RHS
:
504 /* Try to fold a conditional expression. */
505 if (gimple_assign_rhs_code (stmt
) == COND_EXPR
)
507 tree op0
= gimple_assign_rhs1 (stmt
);
510 location_t cond_loc
= gimple_location (stmt
);
512 if (COMPARISON_CLASS_P (op0
))
/* Defer overflow warnings while folding the comparison so we
   only warn when the folded form is actually used.  */
514 fold_defer_overflow_warnings ();
515 tem
= fold_binary_loc (cond_loc
,
516 TREE_CODE (op0
), TREE_TYPE (op0
),
517 TREE_OPERAND (op0
, 0),
518 TREE_OPERAND (op0
, 1));
519 /* This is actually a conditional expression, not a GIMPLE
520 conditional statement, however, the valid_gimple_rhs_p
521 test still applies. */
522 set
= (tem
&& is_gimple_condexpr (tem
)
523 && valid_gimple_rhs_p (tem
));
524 fold_undefer_overflow_warnings (set
, stmt
, 0);
526 else if (is_gimple_min_invariant (op0
))
535 result
= fold_build3_loc (cond_loc
, COND_EXPR
,
536 TREE_TYPE (gimple_assign_lhs (stmt
)), tem
,
537 gimple_assign_rhs2 (stmt
),
538 gimple_assign_rhs3 (stmt
));
542 result
= fold_ternary_loc (loc
, subcode
,
543 TREE_TYPE (gimple_assign_lhs (stmt
)),
544 gimple_assign_rhs1 (stmt
),
545 gimple_assign_rhs2 (stmt
),
546 gimple_assign_rhs3 (stmt
));
550 STRIP_USELESS_TYPE_CONVERSION (result
);
551 if (valid_gimple_rhs_p (result
))
556 case GIMPLE_INVALID_RHS
:
563 /* Attempt to fold a conditional statement. Return true if any changes were
564 made. We only attempt to fold the condition expression, and do not perform
565 any transformation that would require alteration of the cfg. It is
566 assumed that the operands have been previously folded. */
/* NOTE(review): text damaged by extraction -- braces and the final
   `return' lines are missing.  Only comments were added below.  */
569 fold_gimple_cond (gimple stmt
)
571 tree result
= fold_binary_loc (gimple_location (stmt
),
572 gimple_cond_code (stmt
),
574 gimple_cond_lhs (stmt
),
575 gimple_cond_rhs (stmt
));
579 STRIP_USELESS_TYPE_CONVERSION (result
);
/* Only install the folded form when it is still a valid GIMPLE
   condition; otherwise leave the statement untouched.  */
580 if (is_gimple_condexpr (result
) && valid_gimple_rhs_p (result
))
582 gimple_cond_set_condition_from_tree (stmt
, result
);
590 /* Convert EXPR into a GIMPLE value suitable for substitution on the
591 RHS of an assignment. Insert the necessary statements before
592 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
593 is replaced. If the call is expected to produce a result, then it
594 is replaced by an assignment of the new RHS to the result variable.
595 If the result is to be ignored, then the call is replaced by a
596 GIMPLE_NOP. A proper VDEF chain is retained by making the first
597 VUSE and the last VDEF of the whole sequence be the same as the replaced
598 statement and using new SSA names for stores in between. */
/* NOTE(review): text damaged by extraction -- braces, `return'
   statements and some declarations (e.g. `lhs', `vdef', `laststore',
   `reaching_vuse') are missing from this copy.  Only comments added.  */
601 gimplify_and_update_call_from_tree (gimple_stmt_iterator
*si_p
, tree expr
)
604 gimple stmt
, new_stmt
;
605 gimple_stmt_iterator i
;
606 gimple_seq stmts
= NULL
;
607 struct gimplify_ctx gctx
;
611 stmt
= gsi_stmt (*si_p
);
613 gcc_assert (is_gimple_call (stmt
));
615 push_gimplify_context (&gctx
);
616 gctx
.into_ssa
= gimple_in_ssa_p (cfun
);
618 lhs
= gimple_call_lhs (stmt
);
619 if (lhs
== NULL_TREE
)
/* No result expected: gimplify EXPR for its side effects only.  */
621 gimplify_and_add (expr
, &stmts
);
622 /* We can end up with folding a memcpy of an empty class assignment
623 which gets optimized away by C++ gimplification. */
624 if (gimple_seq_empty_p (stmts
))
626 pop_gimplify_context (NULL
);
627 if (gimple_in_ssa_p (cfun
))
629 unlink_stmt_vdef (stmt
);
632 gsi_replace (si_p
, gimple_build_nop (), true);
/* Result expected: materialize EXPR into a temporary and assign it
   to the original call's LHS.  */
638 tree tmp
= get_initialized_tmp_var (expr
, &stmts
, NULL
);
639 new_stmt
= gimple_build_assign (lhs
, tmp
);
640 i
= gsi_last (stmts
);
641 gsi_insert_after_without_update (&i
, new_stmt
,
642 GSI_CONTINUE_LINKING
);
645 pop_gimplify_context (NULL
);
647 if (gimple_has_location (stmt
))
648 annotate_all_with_location (stmts
, gimple_location (stmt
));
650 /* First iterate over the replacement statements backward, assigning
651 virtual operands to their defining statements. */
653 for (i
= gsi_last (stmts
); !gsi_end_p (i
); gsi_prev (&i
))
655 new_stmt
= gsi_stmt (i
);
656 if ((gimple_assign_single_p (new_stmt
)
657 && !is_gimple_reg (gimple_assign_lhs (new_stmt
)))
658 || (is_gimple_call (new_stmt
)
659 && (gimple_call_flags (new_stmt
)
660 & (ECF_NOVOPS
| ECF_PURE
| ECF_CONST
| ECF_NORETURN
)) == 0))
/* The last store in the sequence reuses the replaced statement's
   VDEF; earlier stores get fresh SSA names.  */
664 vdef
= gimple_vdef (stmt
);
666 vdef
= make_ssa_name (gimple_vop (cfun
), new_stmt
);
667 gimple_set_vdef (new_stmt
, vdef
);
668 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
669 SSA_NAME_DEF_STMT (vdef
) = new_stmt
;
670 laststore
= new_stmt
;
674 /* Second iterate over the statements forward, assigning virtual
675 operands to their uses. */
676 reaching_vuse
= gimple_vuse (stmt
);
677 for (i
= gsi_start (stmts
); !gsi_end_p (i
); gsi_next (&i
))
679 new_stmt
= gsi_stmt (i
);
680 /* If the new statement possibly has a VUSE, update it with exact SSA
681 name we know will reach this one. */
682 if (gimple_has_mem_ops (new_stmt
))
683 gimple_set_vuse (new_stmt
, reaching_vuse
);
684 gimple_set_modified (new_stmt
, true);
685 if (gimple_vdef (new_stmt
))
686 reaching_vuse
= gimple_vdef (new_stmt
);
689 /* If the new sequence does not do a store release the virtual
690 definition of the original statement. */
692 && reaching_vuse
== gimple_vuse (stmt
))
694 tree vdef
= gimple_vdef (stmt
);
696 && TREE_CODE (vdef
) == SSA_NAME
)
698 unlink_stmt_vdef (stmt
);
699 release_ssa_name (vdef
);
703 /* Finally replace the original statement with the sequence. */
704 gsi_replace_with_seq (si_p
, stmts
, false);
707 /* Return the string length, maximum string length or maximum value of
709 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
710 is not NULL and, for TYPE == 0, its value is not equal to the length
711 we determine or if we are unable to determine the length or value,
712 return false. VISITED is a bitmap of visited variables.
713 TYPE is 0 if string length should be returned, 1 for maximum string
714 length and 2 for maximum value ARG can have. */
/* NOTE(review): text damaged by extraction -- braces, returns, the
   TYPE == 2 branch and declarations of `var'/`def_stmt' are missing
   from this copy.  Only comments were added below.  */
717 get_maxval_strlen (tree arg
, tree
*length
, bitmap visited
, int type
)
722 if (TREE_CODE (arg
) != SSA_NAME
)
724 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
725 if (TREE_CODE (arg
) == ADDR_EXPR
726 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ARRAY_REF
727 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg
, 0), 1)))
729 tree aop0
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
730 if (TREE_CODE (aop0
) == INDIRECT_REF
731 && TREE_CODE (TREE_OPERAND (aop0
, 0)) == SSA_NAME
)
732 return get_maxval_strlen (TREE_OPERAND (aop0
, 0),
733 length
, visited
, type
);
/* Presumably the TYPE == 2 (max value) path -- reject non-constant
   or negative values.  TODO(review): confirm against upstream.  */
739 if (TREE_CODE (val
) != INTEGER_CST
740 || tree_int_cst_sgn (val
) < 0)
744 val
= c_strlen (arg
, 1);
/* When computing a maximum, keep the larger of the previously seen
   length and the new one.  */
752 if (TREE_CODE (*length
) != INTEGER_CST
753 || TREE_CODE (val
) != INTEGER_CST
)
756 if (tree_int_cst_lt (*length
, val
))
/* For exact length (TYPE == 0) all paths must agree.  */
760 else if (simple_cst_equal (val
, *length
) != 1)
768 /* If ARG is registered for SSA update we cannot look at its defining
770 if (name_registered_for_update_p (arg
))
773 /* If we were already here, break the infinite cycle. */
774 if (!bitmap_set_bit (visited
, SSA_NAME_VERSION (arg
)))
778 def_stmt
= SSA_NAME_DEF_STMT (var
);
780 switch (gimple_code (def_stmt
))
783 /* The RHS of the statement defining VAR must either have a
784 constant length or come from another SSA_NAME with a constant
786 if (gimple_assign_single_p (def_stmt
)
787 || gimple_assign_unary_nop_p (def_stmt
))
789 tree rhs
= gimple_assign_rhs1 (def_stmt
);
790 return get_maxval_strlen (rhs
, length
, visited
, type
);
792 else if (gimple_assign_rhs_code (def_stmt
) == COND_EXPR
)
/* For a COND_EXPR both arms must yield a determinable length.  */
794 tree op2
= gimple_assign_rhs2 (def_stmt
);
795 tree op3
= gimple_assign_rhs3 (def_stmt
);
796 return get_maxval_strlen (op2
, length
, visited
, type
)
797 && get_maxval_strlen (op3
, length
, visited
, type
);
803 /* All the arguments of the PHI node must have the same constant
807 for (i
= 0; i
< gimple_phi_num_args (def_stmt
); i
++)
809 tree arg
= gimple_phi_arg (def_stmt
, i
)->def
;
811 /* If this PHI has itself as an argument, we cannot
812 determine the string length of this argument. However,
813 if we can find a constant string length for the other
814 PHI args then we can still be sure that this is a
815 constant string length. So be optimistic and just
816 continue with the next argument. */
817 if (arg
== gimple_phi_result (def_stmt
))
820 if (!get_maxval_strlen (arg
, length
, visited
, type
))
832 /* Fold builtin call in statement STMT. Returns a simplified tree.
833 We may return a non-constant expression, including another call
834 to a different function and with different arguments, e.g.,
835 substituting memcpy for strcpy when the string length is known.
836 Note that some builtins expand into inline code that may not
837 be valid in GIMPLE. Callers must take care. */
/* NOTE(review): text damaged by extraction -- braces, returns and many
   declarations (`callee', `a', `result', `val[]', `visited', `ignore',
   `nargs', `arg_idx', `type', `new_val') are missing from this copy.
   Only comments were added below.  */
840 gimple_fold_builtin (gimple stmt
)
848 location_t loc
= gimple_location (stmt
);
850 gcc_assert (is_gimple_call (stmt
));
/* A call with no LHS produces an ignored result.  */
852 ignore
= (gimple_call_lhs (stmt
) == NULL
);
854 /* First try the generic builtin folder. If that succeeds, return the
856 result
= fold_call_stmt (stmt
, ignore
);
864 /* Ignore MD builtins. */
865 callee
= gimple_call_fndecl (stmt
);
866 if (DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_MD
)
869 /* Give up for always_inline inline builtins until they are
871 if (avoid_folding_inline_builtin (callee
))
874 /* If the builtin could not be folded, and it has no argument list,
876 nargs
= gimple_call_num_args (stmt
);
880 /* Limit the work only for builtins we know how to simplify. */
881 switch (DECL_FUNCTION_CODE (callee
))
883 case BUILT_IN_STRLEN
:
885 case BUILT_IN_FPUTS_UNLOCKED
:
889 case BUILT_IN_STRCPY
:
890 case BUILT_IN_STRNCPY
:
894 case BUILT_IN_MEMCPY_CHK
:
895 case BUILT_IN_MEMPCPY_CHK
:
896 case BUILT_IN_MEMMOVE_CHK
:
897 case BUILT_IN_MEMSET_CHK
:
898 case BUILT_IN_STRNCPY_CHK
:
899 case BUILT_IN_STPNCPY_CHK
:
903 case BUILT_IN_STRCPY_CHK
:
904 case BUILT_IN_STPCPY_CHK
:
908 case BUILT_IN_SNPRINTF_CHK
:
909 case BUILT_IN_VSNPRINTF_CHK
:
/* Missing-lines note: presumably each case above set `arg_idx' and
   `type' for the strlen query below -- confirm against upstream.  */
917 if (arg_idx
>= nargs
)
920 /* Try to use the dataflow information gathered by the CCP process. */
921 visited
= BITMAP_ALLOC (NULL
);
922 bitmap_clear (visited
);
924 memset (val
, 0, sizeof (val
));
925 a
= gimple_call_arg (stmt
, arg_idx
);
926 if (!get_maxval_strlen (a
, &val
[arg_idx
], visited
, type
))
927 val
[arg_idx
] = NULL_TREE
;
929 BITMAP_FREE (visited
);
/* Second dispatch: with the string-length information in val[],
   fold the specific builtin.  */
932 switch (DECL_FUNCTION_CODE (callee
))
934 case BUILT_IN_STRLEN
:
935 if (val
[0] && nargs
== 1)
938 fold_convert (TREE_TYPE (gimple_call_lhs (stmt
)), val
[0]);
940 /* If the result is not a valid gimple value, or not a cast
941 of a valid gimple value, then we cannot use the result. */
942 if (is_gimple_val (new_val
)
943 || (CONVERT_EXPR_P (new_val
)
944 && is_gimple_val (TREE_OPERAND (new_val
, 0))))
949 case BUILT_IN_STRCPY
:
950 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 2)
951 result
= fold_builtin_strcpy (loc
, callee
,
952 gimple_call_arg (stmt
, 0),
953 gimple_call_arg (stmt
, 1),
957 case BUILT_IN_STRNCPY
:
958 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 3)
959 result
= fold_builtin_strncpy (loc
, callee
,
960 gimple_call_arg (stmt
, 0),
961 gimple_call_arg (stmt
, 1),
962 gimple_call_arg (stmt
, 2),
/* Missing-lines note: the BUILT_IN_FPUTS case label appears lost;
   the call below matches fold_builtin_fputs (unlocked = false).  */
968 result
= fold_builtin_fputs (loc
, gimple_call_arg (stmt
, 0),
969 gimple_call_arg (stmt
, 1),
970 ignore
, false, val
[0]);
973 case BUILT_IN_FPUTS_UNLOCKED
:
975 result
= fold_builtin_fputs (loc
, gimple_call_arg (stmt
, 0),
976 gimple_call_arg (stmt
, 1),
977 ignore
, true, val
[0]);
980 case BUILT_IN_MEMCPY_CHK
:
981 case BUILT_IN_MEMPCPY_CHK
:
982 case BUILT_IN_MEMMOVE_CHK
:
983 case BUILT_IN_MEMSET_CHK
:
984 if (val
[2] && is_gimple_val (val
[2]) && nargs
== 4)
985 result
= fold_builtin_memory_chk (loc
, callee
,
986 gimple_call_arg (stmt
, 0),
987 gimple_call_arg (stmt
, 1),
988 gimple_call_arg (stmt
, 2),
989 gimple_call_arg (stmt
, 3),
991 DECL_FUNCTION_CODE (callee
));
994 case BUILT_IN_STRCPY_CHK
:
995 case BUILT_IN_STPCPY_CHK
:
996 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 3)
997 result
= fold_builtin_stxcpy_chk (loc
, callee
,
998 gimple_call_arg (stmt
, 0),
999 gimple_call_arg (stmt
, 1),
1000 gimple_call_arg (stmt
, 2),
1002 DECL_FUNCTION_CODE (callee
));
1005 case BUILT_IN_STRNCPY_CHK
:
1006 case BUILT_IN_STPNCPY_CHK
:
1007 if (val
[2] && is_gimple_val (val
[2]) && nargs
== 4)
1008 result
= fold_builtin_stxncpy_chk (loc
, gimple_call_arg (stmt
, 0),
1009 gimple_call_arg (stmt
, 1),
1010 gimple_call_arg (stmt
, 2),
1011 gimple_call_arg (stmt
, 3),
1013 DECL_FUNCTION_CODE (callee
));
1016 case BUILT_IN_SNPRINTF_CHK
:
1017 case BUILT_IN_VSNPRINTF_CHK
:
1018 if (val
[1] && is_gimple_val (val
[1]))
1019 result
= gimple_fold_builtin_snprintf_chk (stmt
, val
[1],
1020 DECL_FUNCTION_CODE (callee
));
/* If the result is discarded anyway, strip it down to its side
   effects.  */
1027 if (result
&& ignore
)
1028 result
= fold_ignored_result (result
);
1033 /* Return a binfo to be used for devirtualization of calls based on an object
1034 represented by a declaration (i.e. a global or automatically allocated one)
1035 or NULL if it cannot be found or is not safe. CST is expected to be an
1036 ADDR_EXPR of such object or the function will return NULL. Currently it is
1037 safe to use such binfo only if it has no base binfo (i.e. no ancestors)
1038 EXPECTED_TYPE is type of the class virtual belongs to. */
/* NOTE(review): text damaged by extraction -- braces, returns, the
   `fld' declaration and some loop structure are missing from this copy.
   Only comments were added below.  */
1041 gimple_extract_devirt_binfo_from_cst (tree cst
, tree expected_type
)
1043 HOST_WIDE_INT offset
, size
, max_size
;
1044 tree base
, type
, binfo
;
1045 bool last_artificial
= false;
1047 if (!flag_devirtualize
1048 || TREE_CODE (cst
) != ADDR_EXPR
1049 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst
))) != RECORD_TYPE
)
1052 cst
= TREE_OPERAND (cst
, 0);
1053 base
= get_ref_base_and_extent (cst
, &offset
, &size
, &max_size
);
1054 type
= TREE_TYPE (base
);
1058 || TREE_CODE (type
) != RECORD_TYPE
)
1061 /* Find the sub-object the constant actually refers to and mark whether it is
1062 an artificial one (as opposed to a user-defined one). */
1065 HOST_WIDE_INT pos
, size
;
/* Walk down the field hierarchy until we reach a sub-object of the
   expected (ODR-equal) type.  */
1068 if (types_same_for_odr (type
, expected_type
))
1073 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
1075 if (TREE_CODE (fld
) != FIELD_DECL
)
1078 pos
= int_bit_position (fld
);
1079 size
= tree_to_uhwi (DECL_SIZE (fld
));
1080 if (pos
<= offset
&& (pos
+ size
) > offset
)
1083 if (!fld
|| TREE_CODE (TREE_TYPE (fld
)) != RECORD_TYPE
)
1086 last_artificial
= DECL_ARTIFICIAL (fld
);
1087 type
= TREE_TYPE (fld
);
1090 /* Artificial sub-objects are ancestors, we do not want to use them for
1091 devirtualization, at least not here. */
1092 if (last_artificial
)
1094 binfo
= TYPE_BINFO (type
);
/* Only binfos without ancestors are currently considered safe (see
   the head comment).  */
1095 if (!binfo
|| BINFO_N_BASE_BINFOS (binfo
) > 0)
1101 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1102 The statement may be replaced by another statement, e.g., if the call
1103 simplifies to a constant value. Return true if any changes were made.
1104 It is assumed that the operands have been previously folded. */
/* NOTE(review): text damaged by extraction -- braces, returns and some
   declarations (`i', `callee', `token') are missing from this copy.
   Only comments were added below.  The runtime string "Type
   inheritnace ..." contains a typo but is program output and is left
   byte-for-byte unchanged here.  */
1107 gimple_fold_call (gimple_stmt_iterator
*gsi
, bool inplace
)
1109 gimple stmt
= gsi_stmt (*gsi
);
1111 bool changed
= false;
1114 /* Fold *& in call arguments. */
1115 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
1116 if (REFERENCE_CLASS_P (gimple_call_arg (stmt
, i
)))
1118 tree tmp
= maybe_fold_reference (gimple_call_arg (stmt
, i
), false);
1121 gimple_call_set_arg (stmt
, i
, tmp
);
1126 /* Check for virtual calls that became direct calls. */
1127 callee
= gimple_call_fn (stmt
);
1128 if (callee
&& TREE_CODE (callee
) == OBJ_TYPE_REF
)
1130 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee
)) != NULL_TREE
)
/* Diagnose devirtualizations that disagree with the type
   inheritance graph (dump-file note only).  */
1132 if (dump_file
&& virtual_method_call_p (callee
)
1133 && !possible_polymorphic_call_target_p
1134 (callee
, cgraph_get_node (gimple_call_addr_fndecl
1135 (OBJ_TYPE_REF_EXPR (callee
)))))
1138 "Type inheritnace inconsistent devirtualization of ");
1139 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
1140 fprintf (dump_file
, " to ");
1141 print_generic_expr (dump_file
, callee
, TDF_SLIM
);
1142 fprintf (dump_file
, "\n");
1145 gimple_call_set_fn (stmt
, OBJ_TYPE_REF_EXPR (callee
));
1148 else if (virtual_method_call_p (callee
))
/* Try to devirtualize through the static type of the object the
   OBJ_TYPE_REF is invoked on.  */
1150 tree obj
= OBJ_TYPE_REF_OBJECT (callee
);
1151 tree binfo
= gimple_extract_devirt_binfo_from_cst
1152 (obj
, obj_type_ref_class (callee
));
1156 = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee
));
1157 tree fndecl
= gimple_get_virt_method_for_binfo (token
, binfo
);
1160 #ifdef ENABLE_CHECKING
1161 gcc_assert (possible_polymorphic_call_target_p
1162 (callee
, cgraph_get_node (fndecl
)));
1165 gimple_call_set_fndecl (stmt
, fndecl
);
1175 /* Check for builtins that CCP can handle using information not
1176 available in the generic fold routines. */
1177 callee
= gimple_call_fndecl (stmt
);
1178 if (callee
&& DECL_BUILT_IN (callee
))
1180 tree result
= gimple_fold_builtin (stmt
);
/* Replace the call with the folded result, gimplifying it first
   when it is not a valid GIMPLE RHS as-is.  */
1183 if (!update_call_from_tree (gsi
, result
))
1184 gimplify_and_update_call_from_tree (gsi
, result
);
1187 else if (DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_MD
)
1188 changed
|= targetm
.gimple_fold_builtin (gsi
);
1194 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1195 distinguishes both cases. */
1198 fold_stmt_1 (gimple_stmt_iterator
*gsi
, bool inplace
)
1200 bool changed
= false;
1201 gimple stmt
= gsi_stmt (*gsi
);
1204 /* Fold the main computation performed by the statement. */
1205 switch (gimple_code (stmt
))
1209 unsigned old_num_ops
= gimple_num_ops (stmt
);
1210 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
1211 tree lhs
= gimple_assign_lhs (stmt
);
1213 /* First canonicalize operand order. This avoids building new
1214 trees if this is the only thing fold would later do. */
1215 if ((commutative_tree_code (subcode
)
1216 || commutative_ternary_tree_code (subcode
))
1217 && tree_swap_operands_p (gimple_assign_rhs1 (stmt
),
1218 gimple_assign_rhs2 (stmt
), false))
1220 tree tem
= gimple_assign_rhs1 (stmt
);
1221 gimple_assign_set_rhs1 (stmt
, gimple_assign_rhs2 (stmt
));
1222 gimple_assign_set_rhs2 (stmt
, tem
);
1225 new_rhs
= fold_gimple_assign (gsi
);
1227 && !useless_type_conversion_p (TREE_TYPE (lhs
),
1228 TREE_TYPE (new_rhs
)))
1229 new_rhs
= fold_convert (TREE_TYPE (lhs
), new_rhs
);
1232 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs
)) < old_num_ops
))
1234 gimple_assign_set_rhs_from_tree (gsi
, new_rhs
);
1241 changed
|= fold_gimple_cond (stmt
);
1245 changed
|= gimple_fold_call (gsi
, inplace
);
1249 /* Fold *& in asm operands. */
1252 const char **oconstraints
;
1253 const char *constraint
;
1254 bool allows_mem
, allows_reg
;
1256 noutputs
= gimple_asm_noutputs (stmt
);
1257 oconstraints
= XALLOCAVEC (const char *, noutputs
);
1259 for (i
= 0; i
< gimple_asm_noutputs (stmt
); ++i
)
1261 tree link
= gimple_asm_output_op (stmt
, i
);
1262 tree op
= TREE_VALUE (link
);
1264 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
1265 if (REFERENCE_CLASS_P (op
)
1266 && (op
= maybe_fold_reference (op
, true)) != NULL_TREE
)
1268 TREE_VALUE (link
) = op
;
1272 for (i
= 0; i
< gimple_asm_ninputs (stmt
); ++i
)
1274 tree link
= gimple_asm_input_op (stmt
, i
);
1275 tree op
= TREE_VALUE (link
);
1277 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
1278 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
1279 oconstraints
, &allows_mem
, &allows_reg
);
1280 if (REFERENCE_CLASS_P (op
)
1281 && (op
= maybe_fold_reference (op
, !allows_reg
&& allows_mem
))
1284 TREE_VALUE (link
) = op
;
1292 if (gimple_debug_bind_p (stmt
))
1294 tree val
= gimple_debug_bind_get_value (stmt
);
1296 && REFERENCE_CLASS_P (val
))
1298 tree tem
= maybe_fold_reference (val
, false);
1301 gimple_debug_bind_set_value (stmt
, tem
);
1306 && TREE_CODE (val
) == ADDR_EXPR
)
1308 tree ref
= TREE_OPERAND (val
, 0);
1309 tree tem
= maybe_fold_reference (ref
, false);
1312 tem
= build_fold_addr_expr_with_type (tem
, TREE_TYPE (val
));
1313 gimple_debug_bind_set_value (stmt
, tem
);
1323 stmt
= gsi_stmt (*gsi
);
1325 /* Fold *& on the lhs. */
1326 if (gimple_has_lhs (stmt
))
1328 tree lhs
= gimple_get_lhs (stmt
);
1329 if (lhs
&& REFERENCE_CLASS_P (lhs
))
1331 tree new_lhs
= maybe_fold_reference (lhs
, true);
1334 gimple_set_lhs (stmt
, new_lhs
);
1343 /* Fold the statement pointed to by GSI. In some cases, this function may
1344 replace the whole statement with a new one. Returns true iff folding
1346 The statement pointed to by GSI should be in valid gimple form but may
1347 be in unfolded state as resulting from for example constant propagation
1348 which can produce *&x = 0. */
1351 fold_stmt (gimple_stmt_iterator
*gsi
)
1353 return fold_stmt_1 (gsi
, false);
1356 /* Perform the minimal folding on statement *GSI. Only operations like
1357 *&x created by constant propagation are handled. The statement cannot
1358 be replaced with a new one. Return true if the statement was
1359 changed, false otherwise.
1360 The statement *GSI should be in valid gimple form but may
1361 be in unfolded state as resulting from for example constant propagation
1362 which can produce *&x = 0. */
1365 fold_stmt_inplace (gimple_stmt_iterator
*gsi
)
1367 gimple stmt
= gsi_stmt (*gsi
);
1368 bool changed
= fold_stmt_1 (gsi
, true);
1369 gcc_assert (gsi_stmt (*gsi
) == stmt
);
1373 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1374 if EXPR is null or we don't know how.
1375 If non-null, the result always has boolean type. */
1378 canonicalize_bool (tree expr
, bool invert
)
1384 if (integer_nonzerop (expr
))
1385 return boolean_false_node
;
1386 else if (integer_zerop (expr
))
1387 return boolean_true_node
;
1388 else if (TREE_CODE (expr
) == SSA_NAME
)
1389 return fold_build2 (EQ_EXPR
, boolean_type_node
, expr
,
1390 build_int_cst (TREE_TYPE (expr
), 0));
1391 else if (TREE_CODE_CLASS (TREE_CODE (expr
)) == tcc_comparison
)
1392 return fold_build2 (invert_tree_comparison (TREE_CODE (expr
), false),
1394 TREE_OPERAND (expr
, 0),
1395 TREE_OPERAND (expr
, 1));
1401 if (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
1403 if (integer_nonzerop (expr
))
1404 return boolean_true_node
;
1405 else if (integer_zerop (expr
))
1406 return boolean_false_node
;
1407 else if (TREE_CODE (expr
) == SSA_NAME
)
1408 return fold_build2 (NE_EXPR
, boolean_type_node
, expr
,
1409 build_int_cst (TREE_TYPE (expr
), 0));
1410 else if (TREE_CODE_CLASS (TREE_CODE (expr
)) == tcc_comparison
)
1411 return fold_build2 (TREE_CODE (expr
),
1413 TREE_OPERAND (expr
, 0),
1414 TREE_OPERAND (expr
, 1));
1420 /* Check to see if a boolean expression EXPR is logically equivalent to the
1421 comparison (OP1 CODE OP2). Check for various identities involving
1425 same_bool_comparison_p (const_tree expr
, enum tree_code code
,
1426 const_tree op1
, const_tree op2
)
1430 /* The obvious case. */
1431 if (TREE_CODE (expr
) == code
1432 && operand_equal_p (TREE_OPERAND (expr
, 0), op1
, 0)
1433 && operand_equal_p (TREE_OPERAND (expr
, 1), op2
, 0))
1436 /* Check for comparing (name, name != 0) and the case where expr
1437 is an SSA_NAME with a definition matching the comparison. */
1438 if (TREE_CODE (expr
) == SSA_NAME
1439 && TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
1441 if (operand_equal_p (expr
, op1
, 0))
1442 return ((code
== NE_EXPR
&& integer_zerop (op2
))
1443 || (code
== EQ_EXPR
&& integer_nonzerop (op2
)));
1444 s
= SSA_NAME_DEF_STMT (expr
);
1445 if (is_gimple_assign (s
)
1446 && gimple_assign_rhs_code (s
) == code
1447 && operand_equal_p (gimple_assign_rhs1 (s
), op1
, 0)
1448 && operand_equal_p (gimple_assign_rhs2 (s
), op2
, 0))
1452 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1453 of name is a comparison, recurse. */
1454 if (TREE_CODE (op1
) == SSA_NAME
1455 && TREE_CODE (TREE_TYPE (op1
)) == BOOLEAN_TYPE
)
1457 s
= SSA_NAME_DEF_STMT (op1
);
1458 if (is_gimple_assign (s
)
1459 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
)
1461 enum tree_code c
= gimple_assign_rhs_code (s
);
1462 if ((c
== NE_EXPR
&& integer_zerop (op2
))
1463 || (c
== EQ_EXPR
&& integer_nonzerop (op2
)))
1464 return same_bool_comparison_p (expr
, c
,
1465 gimple_assign_rhs1 (s
),
1466 gimple_assign_rhs2 (s
));
1467 if ((c
== EQ_EXPR
&& integer_zerop (op2
))
1468 || (c
== NE_EXPR
&& integer_nonzerop (op2
)))
1469 return same_bool_comparison_p (expr
,
1470 invert_tree_comparison (c
, false),
1471 gimple_assign_rhs1 (s
),
1472 gimple_assign_rhs2 (s
));
1478 /* Check to see if two boolean expressions OP1 and OP2 are logically
1482 same_bool_result_p (const_tree op1
, const_tree op2
)
1484 /* Simple cases first. */
1485 if (operand_equal_p (op1
, op2
, 0))
1488 /* Check the cases where at least one of the operands is a comparison.
1489 These are a bit smarter than operand_equal_p in that they apply some
1490 identifies on SSA_NAMEs. */
1491 if (TREE_CODE_CLASS (TREE_CODE (op2
)) == tcc_comparison
1492 && same_bool_comparison_p (op1
, TREE_CODE (op2
),
1493 TREE_OPERAND (op2
, 0),
1494 TREE_OPERAND (op2
, 1)))
1496 if (TREE_CODE_CLASS (TREE_CODE (op1
)) == tcc_comparison
1497 && same_bool_comparison_p (op2
, TREE_CODE (op1
),
1498 TREE_OPERAND (op1
, 0),
1499 TREE_OPERAND (op1
, 1)))
1506 /* Forward declarations for some mutually recursive functions. */
1509 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
1510 enum tree_code code2
, tree op2a
, tree op2b
);
1512 and_var_with_comparison (tree var
, bool invert
,
1513 enum tree_code code2
, tree op2a
, tree op2b
);
1515 and_var_with_comparison_1 (gimple stmt
,
1516 enum tree_code code2
, tree op2a
, tree op2b
);
1518 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
1519 enum tree_code code2
, tree op2a
, tree op2b
);
1521 or_var_with_comparison (tree var
, bool invert
,
1522 enum tree_code code2
, tree op2a
, tree op2b
);
1524 or_var_with_comparison_1 (gimple stmt
,
1525 enum tree_code code2
, tree op2a
, tree op2b
);
1527 /* Helper function for and_comparisons_1: try to simplify the AND of the
1528 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1529 If INVERT is true, invert the value of the VAR before doing the AND.
1530 Return NULL_EXPR if we can't simplify this to a single expression. */
1533 and_var_with_comparison (tree var
, bool invert
,
1534 enum tree_code code2
, tree op2a
, tree op2b
)
1537 gimple stmt
= SSA_NAME_DEF_STMT (var
);
1539 /* We can only deal with variables whose definitions are assignments. */
1540 if (!is_gimple_assign (stmt
))
1543 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1544 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1545 Then we only have to consider the simpler non-inverted cases. */
1547 t
= or_var_with_comparison_1 (stmt
,
1548 invert_tree_comparison (code2
, false),
1551 t
= and_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
1552 return canonicalize_bool (t
, invert
);
1555 /* Try to simplify the AND of the ssa variable defined by the assignment
1556 STMT with the comparison specified by (OP2A CODE2 OP2B).
1557 Return NULL_EXPR if we can't simplify this to a single expression. */
1560 and_var_with_comparison_1 (gimple stmt
,
1561 enum tree_code code2
, tree op2a
, tree op2b
)
1563 tree var
= gimple_assign_lhs (stmt
);
1564 tree true_test_var
= NULL_TREE
;
1565 tree false_test_var
= NULL_TREE
;
1566 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
1568 /* Check for identities like (var AND (var == 0)) => false. */
1569 if (TREE_CODE (op2a
) == SSA_NAME
1570 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
1572 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
1573 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
1575 true_test_var
= op2a
;
1576 if (var
== true_test_var
)
1579 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
1580 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
1582 false_test_var
= op2a
;
1583 if (var
== false_test_var
)
1584 return boolean_false_node
;
1588 /* If the definition is a comparison, recurse on it. */
1589 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
1591 tree t
= and_comparisons_1 (innercode
,
1592 gimple_assign_rhs1 (stmt
),
1593 gimple_assign_rhs2 (stmt
),
1601 /* If the definition is an AND or OR expression, we may be able to
1602 simplify by reassociating. */
1603 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
1604 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
1606 tree inner1
= gimple_assign_rhs1 (stmt
);
1607 tree inner2
= gimple_assign_rhs2 (stmt
);
1610 tree partial
= NULL_TREE
;
1611 bool is_and
= (innercode
== BIT_AND_EXPR
);
1613 /* Check for boolean identities that don't require recursive examination
1615 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1616 inner1 AND (inner1 OR inner2) => inner1
1617 !inner1 AND (inner1 AND inner2) => false
1618 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1619 Likewise for similar cases involving inner2. */
1620 if (inner1
== true_test_var
)
1621 return (is_and
? var
: inner1
);
1622 else if (inner2
== true_test_var
)
1623 return (is_and
? var
: inner2
);
1624 else if (inner1
== false_test_var
)
1626 ? boolean_false_node
1627 : and_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
1628 else if (inner2
== false_test_var
)
1630 ? boolean_false_node
1631 : and_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
1633 /* Next, redistribute/reassociate the AND across the inner tests.
1634 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1635 if (TREE_CODE (inner1
) == SSA_NAME
1636 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
1637 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
1638 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
1639 gimple_assign_rhs1 (s
),
1640 gimple_assign_rhs2 (s
),
1641 code2
, op2a
, op2b
)))
1643 /* Handle the AND case, where we are reassociating:
1644 (inner1 AND inner2) AND (op2a code2 op2b)
1646 If the partial result t is a constant, we win. Otherwise
1647 continue on to try reassociating with the other inner test. */
1650 if (integer_onep (t
))
1652 else if (integer_zerop (t
))
1653 return boolean_false_node
;
1656 /* Handle the OR case, where we are redistributing:
1657 (inner1 OR inner2) AND (op2a code2 op2b)
1658 => (t OR (inner2 AND (op2a code2 op2b))) */
1659 else if (integer_onep (t
))
1660 return boolean_true_node
;
1662 /* Save partial result for later. */
1666 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1667 if (TREE_CODE (inner2
) == SSA_NAME
1668 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
1669 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
1670 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
1671 gimple_assign_rhs1 (s
),
1672 gimple_assign_rhs2 (s
),
1673 code2
, op2a
, op2b
)))
1675 /* Handle the AND case, where we are reassociating:
1676 (inner1 AND inner2) AND (op2a code2 op2b)
1677 => (inner1 AND t) */
1680 if (integer_onep (t
))
1682 else if (integer_zerop (t
))
1683 return boolean_false_node
;
1684 /* If both are the same, we can apply the identity
1686 else if (partial
&& same_bool_result_p (t
, partial
))
1690 /* Handle the OR case. where we are redistributing:
1691 (inner1 OR inner2) AND (op2a code2 op2b)
1692 => (t OR (inner1 AND (op2a code2 op2b)))
1693 => (t OR partial) */
1696 if (integer_onep (t
))
1697 return boolean_true_node
;
1700 /* We already got a simplification for the other
1701 operand to the redistributed OR expression. The
1702 interesting case is when at least one is false.
1703 Or, if both are the same, we can apply the identity
1705 if (integer_zerop (partial
))
1707 else if (integer_zerop (t
))
1709 else if (same_bool_result_p (t
, partial
))
1718 /* Try to simplify the AND of two comparisons defined by
1719 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1720 If this can be done without constructing an intermediate value,
1721 return the resulting tree; otherwise NULL_TREE is returned.
1722 This function is deliberately asymmetric as it recurses on SSA_DEFs
1723 in the first comparison but not the second. */
1726 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
1727 enum tree_code code2
, tree op2a
, tree op2b
)
1729 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
1731 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1732 if (operand_equal_p (op1a
, op2a
, 0)
1733 && operand_equal_p (op1b
, op2b
, 0))
1735 /* Result will be either NULL_TREE, or a combined comparison. */
1736 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
1737 TRUTH_ANDIF_EXPR
, code1
, code2
,
1738 truth_type
, op1a
, op1b
);
1743 /* Likewise the swapped case of the above. */
1744 if (operand_equal_p (op1a
, op2b
, 0)
1745 && operand_equal_p (op1b
, op2a
, 0))
1747 /* Result will be either NULL_TREE, or a combined comparison. */
1748 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
1749 TRUTH_ANDIF_EXPR
, code1
,
1750 swap_tree_comparison (code2
),
1751 truth_type
, op1a
, op1b
);
1756 /* If both comparisons are of the same value against constants, we might
1757 be able to merge them. */
1758 if (operand_equal_p (op1a
, op2a
, 0)
1759 && TREE_CODE (op1b
) == INTEGER_CST
1760 && TREE_CODE (op2b
) == INTEGER_CST
)
1762 int cmp
= tree_int_cst_compare (op1b
, op2b
);
1764 /* If we have (op1a == op1b), we should either be able to
1765 return that or FALSE, depending on whether the constant op1b
1766 also satisfies the other comparison against op2b. */
1767 if (code1
== EQ_EXPR
)
1773 case EQ_EXPR
: val
= (cmp
== 0); break;
1774 case NE_EXPR
: val
= (cmp
!= 0); break;
1775 case LT_EXPR
: val
= (cmp
< 0); break;
1776 case GT_EXPR
: val
= (cmp
> 0); break;
1777 case LE_EXPR
: val
= (cmp
<= 0); break;
1778 case GE_EXPR
: val
= (cmp
>= 0); break;
1779 default: done
= false;
1784 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1786 return boolean_false_node
;
1789 /* Likewise if the second comparison is an == comparison. */
1790 else if (code2
== EQ_EXPR
)
1796 case EQ_EXPR
: val
= (cmp
== 0); break;
1797 case NE_EXPR
: val
= (cmp
!= 0); break;
1798 case LT_EXPR
: val
= (cmp
> 0); break;
1799 case GT_EXPR
: val
= (cmp
< 0); break;
1800 case LE_EXPR
: val
= (cmp
>= 0); break;
1801 case GE_EXPR
: val
= (cmp
<= 0); break;
1802 default: done
= false;
1807 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1809 return boolean_false_node
;
1813 /* Same business with inequality tests. */
1814 else if (code1
== NE_EXPR
)
1819 case EQ_EXPR
: val
= (cmp
!= 0); break;
1820 case NE_EXPR
: val
= (cmp
== 0); break;
1821 case LT_EXPR
: val
= (cmp
>= 0); break;
1822 case GT_EXPR
: val
= (cmp
<= 0); break;
1823 case LE_EXPR
: val
= (cmp
> 0); break;
1824 case GE_EXPR
: val
= (cmp
< 0); break;
1829 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1831 else if (code2
== NE_EXPR
)
1836 case EQ_EXPR
: val
= (cmp
== 0); break;
1837 case NE_EXPR
: val
= (cmp
!= 0); break;
1838 case LT_EXPR
: val
= (cmp
<= 0); break;
1839 case GT_EXPR
: val
= (cmp
>= 0); break;
1840 case LE_EXPR
: val
= (cmp
< 0); break;
1841 case GE_EXPR
: val
= (cmp
> 0); break;
1846 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1849 /* Chose the more restrictive of two < or <= comparisons. */
1850 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
1851 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
1853 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
1854 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1856 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1859 /* Likewise chose the more restrictive of two > or >= comparisons. */
1860 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
1861 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
1863 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
1864 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1866 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1869 /* Check for singleton ranges. */
1871 && ((code1
== LE_EXPR
&& code2
== GE_EXPR
)
1872 || (code1
== GE_EXPR
&& code2
== LE_EXPR
)))
1873 return fold_build2 (EQ_EXPR
, boolean_type_node
, op1a
, op2b
);
1875 /* Check for disjoint ranges. */
1877 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
1878 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
1879 return boolean_false_node
;
1881 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
1882 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
1883 return boolean_false_node
;
1886 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1887 NAME's definition is a truth value. See if there are any simplifications
1888 that can be done against the NAME's definition. */
1889 if (TREE_CODE (op1a
) == SSA_NAME
1890 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
1891 && (integer_zerop (op1b
) || integer_onep (op1b
)))
1893 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
1894 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
1895 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
1896 switch (gimple_code (stmt
))
1899 /* Try to simplify by copy-propagating the definition. */
1900 return and_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
1903 /* If every argument to the PHI produces the same result when
1904 ANDed with the second comparison, we win.
1905 Do not do this unless the type is bool since we need a bool
1906 result here anyway. */
1907 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
1909 tree result
= NULL_TREE
;
1911 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
1913 tree arg
= gimple_phi_arg_def (stmt
, i
);
1915 /* If this PHI has itself as an argument, ignore it.
1916 If all the other args produce the same result,
1918 if (arg
== gimple_phi_result (stmt
))
1920 else if (TREE_CODE (arg
) == INTEGER_CST
)
1922 if (invert
? integer_nonzerop (arg
) : integer_zerop (arg
))
1925 result
= boolean_false_node
;
1926 else if (!integer_zerop (result
))
1930 result
= fold_build2 (code2
, boolean_type_node
,
1932 else if (!same_bool_comparison_p (result
,
1936 else if (TREE_CODE (arg
) == SSA_NAME
1937 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
1940 gimple def_stmt
= SSA_NAME_DEF_STMT (arg
);
1941 /* In simple cases we can look through PHI nodes,
1942 but we have to be careful with loops.
1944 if (! dom_info_available_p (CDI_DOMINATORS
)
1945 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
1946 || dominated_by_p (CDI_DOMINATORS
,
1947 gimple_bb (def_stmt
),
1950 temp
= and_var_with_comparison (arg
, invert
, code2
,
1956 else if (!same_bool_result_p (result
, temp
))
1972 /* Try to simplify the AND of two comparisons, specified by
1973 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1974 If this can be simplified to a single expression (without requiring
1975 introducing more SSA variables to hold intermediate values),
1976 return the resulting tree. Otherwise return NULL_TREE.
1977 If the result expression is non-null, it has boolean type. */
1980 maybe_fold_and_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
1981 enum tree_code code2
, tree op2a
, tree op2b
)
1983 tree t
= and_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
1987 return and_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
1990 /* Helper function for or_comparisons_1: try to simplify the OR of the
1991 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1992 If INVERT is true, invert the value of VAR before doing the OR.
1993 Return NULL_EXPR if we can't simplify this to a single expression. */
1996 or_var_with_comparison (tree var
, bool invert
,
1997 enum tree_code code2
, tree op2a
, tree op2b
)
2000 gimple stmt
= SSA_NAME_DEF_STMT (var
);
2002 /* We can only deal with variables whose definitions are assignments. */
2003 if (!is_gimple_assign (stmt
))
2006 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2007 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2008 Then we only have to consider the simpler non-inverted cases. */
2010 t
= and_var_with_comparison_1 (stmt
,
2011 invert_tree_comparison (code2
, false),
2014 t
= or_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
2015 return canonicalize_bool (t
, invert
);
2018 /* Try to simplify the OR of the ssa variable defined by the assignment
2019 STMT with the comparison specified by (OP2A CODE2 OP2B).
2020 Return NULL_EXPR if we can't simplify this to a single expression. */
2023 or_var_with_comparison_1 (gimple stmt
,
2024 enum tree_code code2
, tree op2a
, tree op2b
)
2026 tree var
= gimple_assign_lhs (stmt
);
2027 tree true_test_var
= NULL_TREE
;
2028 tree false_test_var
= NULL_TREE
;
2029 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
2031 /* Check for identities like (var OR (var != 0)) => true . */
2032 if (TREE_CODE (op2a
) == SSA_NAME
2033 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
2035 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
2036 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
2038 true_test_var
= op2a
;
2039 if (var
== true_test_var
)
2042 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
2043 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
2045 false_test_var
= op2a
;
2046 if (var
== false_test_var
)
2047 return boolean_true_node
;
2051 /* If the definition is a comparison, recurse on it. */
2052 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
2054 tree t
= or_comparisons_1 (innercode
,
2055 gimple_assign_rhs1 (stmt
),
2056 gimple_assign_rhs2 (stmt
),
2064 /* If the definition is an AND or OR expression, we may be able to
2065 simplify by reassociating. */
2066 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
2067 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
2069 tree inner1
= gimple_assign_rhs1 (stmt
);
2070 tree inner2
= gimple_assign_rhs2 (stmt
);
2073 tree partial
= NULL_TREE
;
2074 bool is_or
= (innercode
== BIT_IOR_EXPR
);
2076 /* Check for boolean identities that don't require recursive examination
2078 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2079 inner1 OR (inner1 AND inner2) => inner1
2080 !inner1 OR (inner1 OR inner2) => true
2081 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2083 if (inner1
== true_test_var
)
2084 return (is_or
? var
: inner1
);
2085 else if (inner2
== true_test_var
)
2086 return (is_or
? var
: inner2
);
2087 else if (inner1
== false_test_var
)
2090 : or_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
2091 else if (inner2
== false_test_var
)
2094 : or_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
2096 /* Next, redistribute/reassociate the OR across the inner tests.
2097 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2098 if (TREE_CODE (inner1
) == SSA_NAME
2099 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
2100 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2101 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
2102 gimple_assign_rhs1 (s
),
2103 gimple_assign_rhs2 (s
),
2104 code2
, op2a
, op2b
)))
2106 /* Handle the OR case, where we are reassociating:
2107 (inner1 OR inner2) OR (op2a code2 op2b)
2109 If the partial result t is a constant, we win. Otherwise
2110 continue on to try reassociating with the other inner test. */
2113 if (integer_onep (t
))
2114 return boolean_true_node
;
2115 else if (integer_zerop (t
))
2119 /* Handle the AND case, where we are redistributing:
2120 (inner1 AND inner2) OR (op2a code2 op2b)
2121 => (t AND (inner2 OR (op2a code op2b))) */
2122 else if (integer_zerop (t
))
2123 return boolean_false_node
;
2125 /* Save partial result for later. */
2129 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2130 if (TREE_CODE (inner2
) == SSA_NAME
2131 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
2132 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2133 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
2134 gimple_assign_rhs1 (s
),
2135 gimple_assign_rhs2 (s
),
2136 code2
, op2a
, op2b
)))
2138 /* Handle the OR case, where we are reassociating:
2139 (inner1 OR inner2) OR (op2a code2 op2b)
2141 => (t OR partial) */
2144 if (integer_zerop (t
))
2146 else if (integer_onep (t
))
2147 return boolean_true_node
;
2148 /* If both are the same, we can apply the identity
2150 else if (partial
&& same_bool_result_p (t
, partial
))
2154 /* Handle the AND case, where we are redistributing:
2155 (inner1 AND inner2) OR (op2a code2 op2b)
2156 => (t AND (inner1 OR (op2a code2 op2b)))
2157 => (t AND partial) */
2160 if (integer_zerop (t
))
2161 return boolean_false_node
;
2164 /* We already got a simplification for the other
2165 operand to the redistributed AND expression. The
2166 interesting case is when at least one is true.
2167 Or, if both are the same, we can apply the identity
2169 if (integer_onep (partial
))
2171 else if (integer_onep (t
))
2173 else if (same_bool_result_p (t
, partial
))
2182 /* Try to simplify the OR of two comparisons defined by
2183 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2184 If this can be done without constructing an intermediate value,
2185 return the resulting tree; otherwise NULL_TREE is returned.
2186 This function is deliberately asymmetric as it recurses on SSA_DEFs
2187 in the first comparison but not the second. */
2190 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
2191 enum tree_code code2
, tree op2a
, tree op2b
)
2193 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
2195 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2196 if (operand_equal_p (op1a
, op2a
, 0)
2197 && operand_equal_p (op1b
, op2b
, 0))
2199 /* Result will be either NULL_TREE, or a combined comparison. */
2200 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2201 TRUTH_ORIF_EXPR
, code1
, code2
,
2202 truth_type
, op1a
, op1b
);
2207 /* Likewise the swapped case of the above. */
2208 if (operand_equal_p (op1a
, op2b
, 0)
2209 && operand_equal_p (op1b
, op2a
, 0))
2211 /* Result will be either NULL_TREE, or a combined comparison. */
2212 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2213 TRUTH_ORIF_EXPR
, code1
,
2214 swap_tree_comparison (code2
),
2215 truth_type
, op1a
, op1b
);
2220 /* If both comparisons are of the same value against constants, we might
2221 be able to merge them. */
2222 if (operand_equal_p (op1a
, op2a
, 0)
2223 && TREE_CODE (op1b
) == INTEGER_CST
2224 && TREE_CODE (op2b
) == INTEGER_CST
)
2226 int cmp
= tree_int_cst_compare (op1b
, op2b
);
2228 /* If we have (op1a != op1b), we should either be able to
2229 return that or TRUE, depending on whether the constant op1b
2230 also satisfies the other comparison against op2b. */
2231 if (code1
== NE_EXPR
)
2237 case EQ_EXPR
: val
= (cmp
== 0); break;
2238 case NE_EXPR
: val
= (cmp
!= 0); break;
2239 case LT_EXPR
: val
= (cmp
< 0); break;
2240 case GT_EXPR
: val
= (cmp
> 0); break;
2241 case LE_EXPR
: val
= (cmp
<= 0); break;
2242 case GE_EXPR
: val
= (cmp
>= 0); break;
2243 default: done
= false;
2248 return boolean_true_node
;
2250 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2253 /* Likewise if the second comparison is a != comparison. */
2254 else if (code2
== NE_EXPR
)
2260 case EQ_EXPR
: val
= (cmp
== 0); break;
2261 case NE_EXPR
: val
= (cmp
!= 0); break;
2262 case LT_EXPR
: val
= (cmp
> 0); break;
2263 case GT_EXPR
: val
= (cmp
< 0); break;
2264 case LE_EXPR
: val
= (cmp
>= 0); break;
2265 case GE_EXPR
: val
= (cmp
<= 0); break;
2266 default: done
= false;
2271 return boolean_true_node
;
2273 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2277 /* See if an equality test is redundant with the other comparison. */
2278 else if (code1
== EQ_EXPR
)
2283 case EQ_EXPR
: val
= (cmp
== 0); break;
2284 case NE_EXPR
: val
= (cmp
!= 0); break;
2285 case LT_EXPR
: val
= (cmp
< 0); break;
2286 case GT_EXPR
: val
= (cmp
> 0); break;
2287 case LE_EXPR
: val
= (cmp
<= 0); break;
2288 case GE_EXPR
: val
= (cmp
>= 0); break;
2293 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2295 else if (code2
== EQ_EXPR
)
2300 case EQ_EXPR
: val
= (cmp
== 0); break;
2301 case NE_EXPR
: val
= (cmp
!= 0); break;
2302 case LT_EXPR
: val
= (cmp
> 0); break;
2303 case GT_EXPR
: val
= (cmp
< 0); break;
2304 case LE_EXPR
: val
= (cmp
>= 0); break;
2305 case GE_EXPR
: val
= (cmp
<= 0); break;
2310 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2313 /* Chose the less restrictive of two < or <= comparisons. */
2314 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
2315 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
2317 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
2318 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2320 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2323 /* Likewise chose the less restrictive of two > or >= comparisons. */
2324 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
2325 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
2327 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
2328 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2330 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2333 /* Check for singleton ranges. */
2335 && ((code1
== LT_EXPR
&& code2
== GT_EXPR
)
2336 || (code1
== GT_EXPR
&& code2
== LT_EXPR
)))
2337 return fold_build2 (NE_EXPR
, boolean_type_node
, op1a
, op2b
);
2339 /* Check for less/greater pairs that don't restrict the range at all. */
2341 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
2342 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
2343 return boolean_true_node
;
2345 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
2346 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
2347 return boolean_true_node
;
2350 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2351 NAME's definition is a truth value. See if there are any simplifications
2352 that can be done against the NAME's definition. */
2353 if (TREE_CODE (op1a
) == SSA_NAME
2354 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
2355 && (integer_zerop (op1b
) || integer_onep (op1b
)))
2357 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
2358 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
2359 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
2360 switch (gimple_code (stmt
))
2363 /* Try to simplify by copy-propagating the definition. */
2364 return or_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
2367 /* If every argument to the PHI produces the same result when
2368 ORed with the second comparison, we win.
2369 Do not do this unless the type is bool since we need a bool
2370 result here anyway. */
2371 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
2373 tree result
= NULL_TREE
;
2375 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
2377 tree arg
= gimple_phi_arg_def (stmt
, i
);
2379 /* If this PHI has itself as an argument, ignore it.
2380 If all the other args produce the same result,
2382 if (arg
== gimple_phi_result (stmt
))
2384 else if (TREE_CODE (arg
) == INTEGER_CST
)
2386 if (invert
? integer_zerop (arg
) : integer_nonzerop (arg
))
2389 result
= boolean_true_node
;
2390 else if (!integer_onep (result
))
2394 result
= fold_build2 (code2
, boolean_type_node
,
2396 else if (!same_bool_comparison_p (result
,
2400 else if (TREE_CODE (arg
) == SSA_NAME
2401 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
2404 gimple def_stmt
= SSA_NAME_DEF_STMT (arg
);
2405 /* In simple cases we can look through PHI nodes,
2406 but we have to be careful with loops.
2408 if (! dom_info_available_p (CDI_DOMINATORS
)
2409 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
2410 || dominated_by_p (CDI_DOMINATORS
,
2411 gimple_bb (def_stmt
),
2414 temp
= or_var_with_comparison (arg
, invert
, code2
,
2420 else if (!same_bool_result_p (result
, temp
))
2436 /* Try to simplify the OR of two comparisons, specified by
2437 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2438 If this can be simplified to a single expression (without requiring
2439 introducing more SSA variables to hold intermediate values),
2440 return the resulting tree. Otherwise return NULL_TREE.
2441 If the result expression is non-null, it has boolean type. */
2444 maybe_fold_or_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
2445 enum tree_code code2
, tree op2a
, tree op2b
)
2447 tree t
= or_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
2451 return or_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
2455 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2457 Either NULL_TREE, a simplified but non-constant or a constant
2460 ??? This should go into a gimple-fold-inline.h file to be eventually
2461 privatized with the single valueize function used in the various TUs
2462 to avoid the indirect function call overhead. */
2465 gimple_fold_stmt_to_constant_1 (gimple stmt
, tree (*valueize
) (tree
))
2467 location_t loc
= gimple_location (stmt
);
2468 switch (gimple_code (stmt
))
2472 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
2474 switch (get_gimple_rhs_class (subcode
))
2476 case GIMPLE_SINGLE_RHS
:
2478 tree rhs
= gimple_assign_rhs1 (stmt
);
2479 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
2481 if (TREE_CODE (rhs
) == SSA_NAME
)
2483 /* If the RHS is an SSA_NAME, return its known constant value,
2485 return (*valueize
) (rhs
);
2487 /* Handle propagating invariant addresses into address
2489 else if (TREE_CODE (rhs
) == ADDR_EXPR
2490 && !is_gimple_min_invariant (rhs
))
2492 HOST_WIDE_INT offset
= 0;
2494 base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs
, 0),
2498 && (CONSTANT_CLASS_P (base
)
2499 || decl_address_invariant_p (base
)))
2500 return build_invariant_address (TREE_TYPE (rhs
),
2503 else if (TREE_CODE (rhs
) == CONSTRUCTOR
2504 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
2505 && (CONSTRUCTOR_NELTS (rhs
)
2506 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
2511 vec
= XALLOCAVEC (tree
,
2512 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
)));
2513 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
2515 val
= (*valueize
) (val
);
2516 if (TREE_CODE (val
) == INTEGER_CST
2517 || TREE_CODE (val
) == REAL_CST
2518 || TREE_CODE (val
) == FIXED_CST
)
2524 return build_vector (TREE_TYPE (rhs
), vec
);
2527 if (kind
== tcc_reference
)
2529 if ((TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
2530 || TREE_CODE (rhs
) == REALPART_EXPR
2531 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
2532 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
2534 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
2535 return fold_unary_loc (EXPR_LOCATION (rhs
),
2537 TREE_TYPE (rhs
), val
);
2539 else if (TREE_CODE (rhs
) == BIT_FIELD_REF
2540 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
2542 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
2543 return fold_ternary_loc (EXPR_LOCATION (rhs
),
2545 TREE_TYPE (rhs
), val
,
2546 TREE_OPERAND (rhs
, 1),
2547 TREE_OPERAND (rhs
, 2));
2549 else if (TREE_CODE (rhs
) == MEM_REF
2550 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
2552 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
2553 if (TREE_CODE (val
) == ADDR_EXPR
2554 && is_gimple_min_invariant (val
))
2556 tree tem
= fold_build2 (MEM_REF
, TREE_TYPE (rhs
),
2558 TREE_OPERAND (rhs
, 1));
2563 return fold_const_aggregate_ref_1 (rhs
, valueize
);
2565 else if (kind
== tcc_declaration
)
2566 return get_symbol_constant_value (rhs
);
2570 case GIMPLE_UNARY_RHS
:
2572 /* Handle unary operators that can appear in GIMPLE form.
2573 Note that we know the single operand must be a constant,
2574 so this should almost always return a simplified RHS. */
2575 tree lhs
= gimple_assign_lhs (stmt
);
2576 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
2578 /* Conversions are useless for CCP purposes if they are
2579 value-preserving. Thus the restrictions that
2580 useless_type_conversion_p places for restrict qualification
2581 of pointer types should not apply here.
2582 Substitution later will only substitute to allowed places. */
2583 if (CONVERT_EXPR_CODE_P (subcode
)
2584 && POINTER_TYPE_P (TREE_TYPE (lhs
))
2585 && POINTER_TYPE_P (TREE_TYPE (op0
))
2586 && TYPE_ADDR_SPACE (TREE_TYPE (lhs
))
2587 == TYPE_ADDR_SPACE (TREE_TYPE (op0
))
2588 && TYPE_MODE (TREE_TYPE (lhs
))
2589 == TYPE_MODE (TREE_TYPE (op0
)))
2593 fold_unary_ignore_overflow_loc (loc
, subcode
,
2594 gimple_expr_type (stmt
), op0
);
2597 case GIMPLE_BINARY_RHS
:
2599 /* Handle binary operators that can appear in GIMPLE form. */
2600 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
2601 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
2603 /* Translate &x + CST into an invariant form suitable for
2604 further propagation. */
2605 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
2606 && TREE_CODE (op0
) == ADDR_EXPR
2607 && TREE_CODE (op1
) == INTEGER_CST
)
2609 tree off
= fold_convert (ptr_type_node
, op1
);
2610 return build_fold_addr_expr_loc
2612 fold_build2 (MEM_REF
,
2613 TREE_TYPE (TREE_TYPE (op0
)),
2614 unshare_expr (op0
), off
));
2617 return fold_binary_loc (loc
, subcode
,
2618 gimple_expr_type (stmt
), op0
, op1
);
2621 case GIMPLE_TERNARY_RHS
:
2623 /* Handle ternary operators that can appear in GIMPLE form. */
2624 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
2625 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
2626 tree op2
= (*valueize
) (gimple_assign_rhs3 (stmt
));
2628 /* Fold embedded expressions in ternary codes. */
2629 if ((subcode
== COND_EXPR
2630 || subcode
== VEC_COND_EXPR
)
2631 && COMPARISON_CLASS_P (op0
))
2633 tree op00
= (*valueize
) (TREE_OPERAND (op0
, 0));
2634 tree op01
= (*valueize
) (TREE_OPERAND (op0
, 1));
2635 tree tem
= fold_binary_loc (loc
, TREE_CODE (op0
),
2636 TREE_TYPE (op0
), op00
, op01
);
2641 return fold_ternary_loc (loc
, subcode
,
2642 gimple_expr_type (stmt
), op0
, op1
, op2
);
2654 if (gimple_call_internal_p (stmt
))
2655 /* No folding yet for these functions. */
2658 fn
= (*valueize
) (gimple_call_fn (stmt
));
2659 if (TREE_CODE (fn
) == ADDR_EXPR
2660 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
2661 && DECL_BUILT_IN (TREE_OPERAND (fn
, 0)))
2663 tree
*args
= XALLOCAVEC (tree
, gimple_call_num_args (stmt
));
2666 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
2667 args
[i
] = (*valueize
) (gimple_call_arg (stmt
, i
));
2668 call
= build_call_array_loc (loc
,
2669 gimple_call_return_type (stmt
),
2670 fn
, gimple_call_num_args (stmt
), args
);
2671 retval
= fold_call_expr (EXPR_LOCATION (call
), call
, false);
2673 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2674 STRIP_NOPS (retval
);
2685 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2686 Returns NULL_TREE if folding to a constant is not possible, otherwise
2687 returns a constant according to is_gimple_min_invariant. */
2690 gimple_fold_stmt_to_constant (gimple stmt
, tree (*valueize
) (tree
))
2692 tree res
= gimple_fold_stmt_to_constant_1 (stmt
, valueize
);
2693 if (res
&& is_gimple_min_invariant (res
))
2699 /* The following set of functions are supposed to fold references using
2700 their constant initializers. */
2702 static tree
fold_ctor_reference (tree type
, tree ctor
,
2703 unsigned HOST_WIDE_INT offset
,
2704 unsigned HOST_WIDE_INT size
, tree
);
2706 /* See if we can find constructor defining value of BASE.
2707 When we know the consructor with constant offset (such as
2708 base is array[40] and we do know constructor of array), then
2709 BIT_OFFSET is adjusted accordingly.
2711 As a special case, return error_mark_node when constructor
2712 is not explicitly available, but it is known to be zero
2713 such as 'static const int a;'. */
2715 get_base_constructor (tree base
, HOST_WIDE_INT
*bit_offset
,
2716 tree (*valueize
)(tree
))
2718 HOST_WIDE_INT bit_offset2
, size
, max_size
;
2719 if (TREE_CODE (base
) == MEM_REF
)
2721 if (!integer_zerop (TREE_OPERAND (base
, 1)))
2723 if (!tree_fits_shwi_p (TREE_OPERAND (base
, 1)))
2725 *bit_offset
+= (mem_ref_offset (base
).low
2730 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
2731 base
= valueize (TREE_OPERAND (base
, 0));
2732 if (!base
|| TREE_CODE (base
) != ADDR_EXPR
)
2734 base
= TREE_OPERAND (base
, 0);
2737 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2738 DECL_INITIAL. If BASE is a nested reference into another
2739 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2740 the inner reference. */
2741 switch (TREE_CODE (base
))
2746 tree init
= ctor_for_folding (base
);
2748 /* Our semantic is exact opposite of ctor_for_folding;
2749 NULL means unknown, while error_mark_node is 0. */
2750 if (init
== error_mark_node
)
2753 return error_mark_node
;
2759 base
= get_ref_base_and_extent (base
, &bit_offset2
, &size
, &max_size
);
2760 if (max_size
== -1 || size
!= max_size
)
2762 *bit_offset
+= bit_offset2
;
2763 return get_base_constructor (base
, bit_offset
, valueize
);
2774 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2775 to the memory at bit OFFSET.
2777 We do only simple job of folding byte accesses. */
2780 fold_string_cst_ctor_reference (tree type
, tree ctor
,
2781 unsigned HOST_WIDE_INT offset
,
2782 unsigned HOST_WIDE_INT size
)
2784 if (INTEGRAL_TYPE_P (type
)
2785 && (TYPE_MODE (type
)
2786 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
))))
2787 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
))))
2789 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
)))) == 1
2790 && size
== BITS_PER_UNIT
2791 && !(offset
% BITS_PER_UNIT
))
2793 offset
/= BITS_PER_UNIT
;
2794 if (offset
< (unsigned HOST_WIDE_INT
) TREE_STRING_LENGTH (ctor
))
2795 return build_int_cst_type (type
, (TREE_STRING_POINTER (ctor
)
2798 const char a[20]="hello";
2801 might lead to offset greater than string length. In this case we
2802 know value is either initialized to 0 or out of bounds. Return 0
2804 return build_zero_cst (type
);
2809 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2810 SIZE to the memory at bit OFFSET. */
2813 fold_array_ctor_reference (tree type
, tree ctor
,
2814 unsigned HOST_WIDE_INT offset
,
2815 unsigned HOST_WIDE_INT size
,
2818 unsigned HOST_WIDE_INT cnt
;
2820 double_int low_bound
, elt_size
;
2821 double_int index
, max_index
;
2822 double_int access_index
;
2823 tree domain_type
= NULL_TREE
, index_type
= NULL_TREE
;
2824 HOST_WIDE_INT inner_offset
;
2826 /* Compute low bound and elt size. */
2827 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
2828 domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
2829 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
2831 /* Static constructors for variably sized objects makes no sense. */
2832 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) == INTEGER_CST
);
2833 index_type
= TREE_TYPE (TYPE_MIN_VALUE (domain_type
));
2834 low_bound
= tree_to_double_int (TYPE_MIN_VALUE (domain_type
));
2837 low_bound
= double_int_zero
;
2838 /* Static constructors for variably sized objects makes no sense. */
2839 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))))
2842 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))));
2845 /* We can handle only constantly sized accesses that are known to not
2846 be larger than size of array element. */
2847 if (!TYPE_SIZE_UNIT (type
)
2848 || TREE_CODE (TYPE_SIZE_UNIT (type
)) != INTEGER_CST
2849 || elt_size
.slt (tree_to_double_int (TYPE_SIZE_UNIT (type
))))
2852 /* Compute the array index we look for. */
2853 access_index
= double_int::from_uhwi (offset
/ BITS_PER_UNIT
)
2854 .udiv (elt_size
, TRUNC_DIV_EXPR
);
2855 access_index
+= low_bound
;
2857 access_index
= access_index
.ext (TYPE_PRECISION (index_type
),
2858 TYPE_UNSIGNED (index_type
));
2860 /* And offset within the access. */
2861 inner_offset
= offset
% (elt_size
.to_uhwi () * BITS_PER_UNIT
);
2863 /* See if the array field is large enough to span whole access. We do not
2864 care to fold accesses spanning multiple array indexes. */
2865 if (inner_offset
+ size
> elt_size
.to_uhwi () * BITS_PER_UNIT
)
2868 index
= low_bound
- double_int_one
;
2870 index
= index
.ext (TYPE_PRECISION (index_type
), TYPE_UNSIGNED (index_type
));
2872 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
, cval
)
2874 /* Array constructor might explicitely set index, or specify range
2875 or leave index NULL meaning that it is next index after previous
2879 if (TREE_CODE (cfield
) == INTEGER_CST
)
2880 max_index
= index
= tree_to_double_int (cfield
);
2883 gcc_assert (TREE_CODE (cfield
) == RANGE_EXPR
);
2884 index
= tree_to_double_int (TREE_OPERAND (cfield
, 0));
2885 max_index
= tree_to_double_int (TREE_OPERAND (cfield
, 1));
2890 index
+= double_int_one
;
2892 index
= index
.ext (TYPE_PRECISION (index_type
),
2893 TYPE_UNSIGNED (index_type
));
2897 /* Do we have match? */
2898 if (access_index
.cmp (index
, 1) >= 0
2899 && access_index
.cmp (max_index
, 1) <= 0)
2900 return fold_ctor_reference (type
, cval
, inner_offset
, size
,
2903 /* When memory is not explicitely mentioned in constructor,
2904 it is 0 (or out of range). */
2905 return build_zero_cst (type
);
2908 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2909 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2912 fold_nonarray_ctor_reference (tree type
, tree ctor
,
2913 unsigned HOST_WIDE_INT offset
,
2914 unsigned HOST_WIDE_INT size
,
2917 unsigned HOST_WIDE_INT cnt
;
2920 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
,
2923 tree byte_offset
= DECL_FIELD_OFFSET (cfield
);
2924 tree field_offset
= DECL_FIELD_BIT_OFFSET (cfield
);
2925 tree field_size
= DECL_SIZE (cfield
);
2926 double_int bitoffset
;
2927 double_int byte_offset_cst
= tree_to_double_int (byte_offset
);
2928 double_int bits_per_unit_cst
= double_int::from_uhwi (BITS_PER_UNIT
);
2929 double_int bitoffset_end
, access_end
;
2931 /* Variable sized objects in static constructors makes no sense,
2932 but field_size can be NULL for flexible array members. */
2933 gcc_assert (TREE_CODE (field_offset
) == INTEGER_CST
2934 && TREE_CODE (byte_offset
) == INTEGER_CST
2935 && (field_size
!= NULL_TREE
2936 ? TREE_CODE (field_size
) == INTEGER_CST
2937 : TREE_CODE (TREE_TYPE (cfield
)) == ARRAY_TYPE
));
2939 /* Compute bit offset of the field. */
2940 bitoffset
= tree_to_double_int (field_offset
)
2941 + byte_offset_cst
* bits_per_unit_cst
;
2942 /* Compute bit offset where the field ends. */
2943 if (field_size
!= NULL_TREE
)
2944 bitoffset_end
= bitoffset
+ tree_to_double_int (field_size
);
2946 bitoffset_end
= double_int_zero
;
2948 access_end
= double_int::from_uhwi (offset
)
2949 + double_int::from_uhwi (size
);
2951 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2952 [BITOFFSET, BITOFFSET_END)? */
2953 if (access_end
.cmp (bitoffset
, 0) > 0
2954 && (field_size
== NULL_TREE
2955 || double_int::from_uhwi (offset
).slt (bitoffset_end
)))
2957 double_int inner_offset
= double_int::from_uhwi (offset
) - bitoffset
;
2958 /* We do have overlap. Now see if field is large enough to
2959 cover the access. Give up for accesses spanning multiple
2961 if (access_end
.cmp (bitoffset_end
, 0) > 0)
2963 if (double_int::from_uhwi (offset
).slt (bitoffset
))
2965 return fold_ctor_reference (type
, cval
,
2966 inner_offset
.to_uhwi (), size
,
2970 /* When memory is not explicitely mentioned in constructor, it is 0. */
2971 return build_zero_cst (type
);
2974 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2975 to the memory at bit OFFSET. */
2978 fold_ctor_reference (tree type
, tree ctor
, unsigned HOST_WIDE_INT offset
,
2979 unsigned HOST_WIDE_INT size
, tree from_decl
)
2983 /* We found the field with exact match. */
2984 if (useless_type_conversion_p (type
, TREE_TYPE (ctor
))
2986 return canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
2988 /* We are at the end of walk, see if we can view convert the
2990 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor
)) && !offset
2991 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2992 && operand_equal_p (TYPE_SIZE (type
),
2993 TYPE_SIZE (TREE_TYPE (ctor
)), 0))
2995 ret
= canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
2996 ret
= fold_unary (VIEW_CONVERT_EXPR
, type
, ret
);
3001 if (TREE_CODE (ctor
) == STRING_CST
)
3002 return fold_string_cst_ctor_reference (type
, ctor
, offset
, size
);
3003 if (TREE_CODE (ctor
) == CONSTRUCTOR
)
3006 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
3007 || TREE_CODE (TREE_TYPE (ctor
)) == VECTOR_TYPE
)
3008 return fold_array_ctor_reference (type
, ctor
, offset
, size
,
3011 return fold_nonarray_ctor_reference (type
, ctor
, offset
, size
,
3018 /* Return the tree representing the element referenced by T if T is an
3019 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
3020 names using VALUEIZE. Return NULL_TREE otherwise. */
3023 fold_const_aggregate_ref_1 (tree t
, tree (*valueize
) (tree
))
3025 tree ctor
, idx
, base
;
3026 HOST_WIDE_INT offset
, size
, max_size
;
3029 if (TREE_THIS_VOLATILE (t
))
3032 if (TREE_CODE_CLASS (TREE_CODE (t
)) == tcc_declaration
)
3033 return get_symbol_constant_value (t
);
3035 tem
= fold_read_from_constant_string (t
);
3039 switch (TREE_CODE (t
))
3042 case ARRAY_RANGE_REF
:
3043 /* Constant indexes are handled well by get_base_constructor.
3044 Only special case variable offsets.
3045 FIXME: This code can't handle nested references with variable indexes
3046 (they will be handled only by iteration of ccp). Perhaps we can bring
3047 get_ref_base_and_extent here and make it use a valueize callback. */
3048 if (TREE_CODE (TREE_OPERAND (t
, 1)) == SSA_NAME
3050 && (idx
= (*valueize
) (TREE_OPERAND (t
, 1)))
3051 && TREE_CODE (idx
) == INTEGER_CST
)
3053 tree low_bound
, unit_size
;
3056 /* If the resulting bit-offset is constant, track it. */
3057 if ((low_bound
= array_ref_low_bound (t
),
3058 TREE_CODE (low_bound
) == INTEGER_CST
)
3059 && (unit_size
= array_ref_element_size (t
),
3060 tree_fits_uhwi_p (unit_size
))
3061 && (doffset
= (TREE_INT_CST (idx
) - TREE_INT_CST (low_bound
))
3062 .sext (TYPE_PRECISION (TREE_TYPE (idx
))),
3063 doffset
.fits_shwi ()))
3065 offset
= doffset
.to_shwi ();
3066 offset
*= TREE_INT_CST_LOW (unit_size
);
3067 offset
*= BITS_PER_UNIT
;
3069 base
= TREE_OPERAND (t
, 0);
3070 ctor
= get_base_constructor (base
, &offset
, valueize
);
3071 /* Empty constructor. Always fold to 0. */
3072 if (ctor
== error_mark_node
)
3073 return build_zero_cst (TREE_TYPE (t
));
3074 /* Out of bound array access. Value is undefined,
3078 /* We can not determine ctor. */
3081 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
,
3082 TREE_INT_CST_LOW (unit_size
)
3091 case TARGET_MEM_REF
:
3093 base
= get_ref_base_and_extent (t
, &offset
, &size
, &max_size
);
3094 ctor
= get_base_constructor (base
, &offset
, valueize
);
3096 /* Empty constructor. Always fold to 0. */
3097 if (ctor
== error_mark_node
)
3098 return build_zero_cst (TREE_TYPE (t
));
3099 /* We do not know precise address. */
3100 if (max_size
== -1 || max_size
!= size
)
3102 /* We can not determine ctor. */
3106 /* Out of bound array access. Value is undefined, but don't fold. */
3110 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
, size
,
3116 tree c
= fold_const_aggregate_ref_1 (TREE_OPERAND (t
, 0), valueize
);
3117 if (c
&& TREE_CODE (c
) == COMPLEX_CST
)
3118 return fold_build1_loc (EXPR_LOCATION (t
),
3119 TREE_CODE (t
), TREE_TYPE (t
), c
);
3131 fold_const_aggregate_ref (tree t
)
3133 return fold_const_aggregate_ref_1 (t
, NULL
);
3136 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3137 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3138 KNOWN_BINFO carries the binfo describing the true type of
3139 OBJ_TYPE_REF_OBJECT(REF). */
3142 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token
, tree known_binfo
)
3144 unsigned HOST_WIDE_INT offset
, size
;
3145 tree v
, fn
, vtable
, init
;
3147 vtable
= v
= BINFO_VTABLE (known_binfo
);
3148 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
3152 if (TREE_CODE (v
) == POINTER_PLUS_EXPR
)
3154 offset
= tree_to_uhwi (TREE_OPERAND (v
, 1)) * BITS_PER_UNIT
;
3155 v
= TREE_OPERAND (v
, 0);
3160 if (TREE_CODE (v
) != ADDR_EXPR
)
3162 v
= TREE_OPERAND (v
, 0);
3164 if (TREE_CODE (v
) != VAR_DECL
3165 || !DECL_VIRTUAL_P (v
))
3167 init
= ctor_for_folding (v
);
3169 /* The virtual tables should always be born with constructors.
3170 and we always should assume that they are avaialble for
3171 folding. At the moment we do not stream them in all cases,
3172 but it should never happen that ctor seem unreachable. */
3174 if (init
== error_mark_node
)
3176 gcc_assert (in_lto_p
);
3179 gcc_checking_assert (TREE_CODE (TREE_TYPE (v
)) == ARRAY_TYPE
);
3180 size
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v
))));
3181 offset
+= token
* size
;
3182 fn
= fold_ctor_reference (TREE_TYPE (TREE_TYPE (v
)), init
,
3184 if (!fn
|| integer_zerop (fn
))
3186 gcc_assert (TREE_CODE (fn
) == ADDR_EXPR
3187 || TREE_CODE (fn
) == FDESC_EXPR
);
3188 fn
= TREE_OPERAND (fn
, 0);
3189 gcc_assert (TREE_CODE (fn
) == FUNCTION_DECL
);
3191 /* When cgraph node is missing and function is not public, we cannot
3192 devirtualize. This can happen in WHOPR when the actual method
3193 ends up in other partition, because we found devirtualization
3194 possibility too late. */
3195 if (!can_refer_decl_in_current_unit_p (fn
, vtable
))
3198 /* Make sure we create a cgraph node for functions we'll reference.
3199 They can be non-existent if the reference comes from an entry
3200 of an external vtable for example. */
3201 cgraph_get_create_node (fn
);
3206 /* Return true iff VAL is a gimple expression that is known to be
3207 non-negative. Restricted to floating-point inputs. */
3210 gimple_val_nonnegative_real_p (tree val
)
3214 gcc_assert (val
&& SCALAR_FLOAT_TYPE_P (TREE_TYPE (val
)));
3216 /* Use existing logic for non-gimple trees. */
3217 if (tree_expr_nonnegative_p (val
))
3220 if (TREE_CODE (val
) != SSA_NAME
)
3223 /* Currently we look only at the immediately defining statement
3224 to make this determination, since recursion on defining
3225 statements of operands can lead to quadratic behavior in the
3226 worst case. This is expected to catch almost all occurrences
3227 in practice. It would be possible to implement limited-depth
3228 recursion if important cases are lost. Alternatively, passes
3229 that need this information (such as the pow/powi lowering code
3230 in the cse_sincos pass) could be revised to provide it through
3231 dataflow propagation. */
3233 def_stmt
= SSA_NAME_DEF_STMT (val
);
3235 if (is_gimple_assign (def_stmt
))
3239 /* See fold-const.c:tree_expr_nonnegative_p for additional
3240 cases that could be handled with recursion. */
3242 switch (gimple_assign_rhs_code (def_stmt
))
3245 /* Always true for floating-point operands. */
3249 /* True if the two operands are identical (since we are
3250 restricted to floating-point inputs). */
3251 op0
= gimple_assign_rhs1 (def_stmt
);
3252 op1
= gimple_assign_rhs2 (def_stmt
);
3255 || operand_equal_p (op0
, op1
, 0))
3262 else if (is_gimple_call (def_stmt
))
3264 tree fndecl
= gimple_call_fndecl (def_stmt
);
3266 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3270 switch (DECL_FUNCTION_CODE (fndecl
))
3272 CASE_FLT_FN (BUILT_IN_ACOS
):
3273 CASE_FLT_FN (BUILT_IN_ACOSH
):
3274 CASE_FLT_FN (BUILT_IN_CABS
):
3275 CASE_FLT_FN (BUILT_IN_COSH
):
3276 CASE_FLT_FN (BUILT_IN_ERFC
):
3277 CASE_FLT_FN (BUILT_IN_EXP
):
3278 CASE_FLT_FN (BUILT_IN_EXP10
):
3279 CASE_FLT_FN (BUILT_IN_EXP2
):
3280 CASE_FLT_FN (BUILT_IN_FABS
):
3281 CASE_FLT_FN (BUILT_IN_FDIM
):
3282 CASE_FLT_FN (BUILT_IN_HYPOT
):
3283 CASE_FLT_FN (BUILT_IN_POW10
):
3286 CASE_FLT_FN (BUILT_IN_SQRT
):
3287 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3288 nonnegative inputs. */
3289 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val
))))
3294 CASE_FLT_FN (BUILT_IN_POWI
):
3295 /* True if the second argument is an even integer. */
3296 arg1
= gimple_call_arg (def_stmt
, 1);
3298 if (TREE_CODE (arg1
) == INTEGER_CST
3299 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
3304 CASE_FLT_FN (BUILT_IN_POW
):
3305 /* True if the second argument is an even integer-valued
3307 arg1
= gimple_call_arg (def_stmt
, 1);
3309 if (TREE_CODE (arg1
) == REAL_CST
)
3314 c
= TREE_REAL_CST (arg1
);
3315 n
= real_to_integer (&c
);
3319 REAL_VALUE_TYPE cint
;
3320 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
3321 if (real_identical (&c
, &cint
))
3337 /* Given a pointer value OP0, return a simplified version of an
3338 indirection through OP0, or NULL_TREE if no simplification is
3339 possible. Note that the resulting type may be different from
3340 the type pointed to in the sense that it is still compatible
3341 from the langhooks point of view. */
3344 gimple_fold_indirect_ref (tree t
)
3346 tree ptype
= TREE_TYPE (t
), type
= TREE_TYPE (ptype
);
3351 subtype
= TREE_TYPE (sub
);
3352 if (!POINTER_TYPE_P (subtype
))
3355 if (TREE_CODE (sub
) == ADDR_EXPR
)
3357 tree op
= TREE_OPERAND (sub
, 0);
3358 tree optype
= TREE_TYPE (op
);
3360 if (useless_type_conversion_p (type
, optype
))
3363 /* *(foo *)&fooarray => fooarray[0] */
3364 if (TREE_CODE (optype
) == ARRAY_TYPE
3365 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype
))) == INTEGER_CST
3366 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
3368 tree type_domain
= TYPE_DOMAIN (optype
);
3369 tree min_val
= size_zero_node
;
3370 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
3371 min_val
= TYPE_MIN_VALUE (type_domain
);
3372 if (TREE_CODE (min_val
) == INTEGER_CST
)
3373 return build4 (ARRAY_REF
, type
, op
, min_val
, NULL_TREE
, NULL_TREE
);
3375 /* *(foo *)&complexfoo => __real__ complexfoo */
3376 else if (TREE_CODE (optype
) == COMPLEX_TYPE
3377 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
3378 return fold_build1 (REALPART_EXPR
, type
, op
);
3379 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
3380 else if (TREE_CODE (optype
) == VECTOR_TYPE
3381 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
3383 tree part_width
= TYPE_SIZE (type
);
3384 tree index
= bitsize_int (0);
3385 return fold_build3 (BIT_FIELD_REF
, type
, op
, part_width
, index
);
3389 /* *(p + CST) -> ... */
3390 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
3391 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
3393 tree addr
= TREE_OPERAND (sub
, 0);
3394 tree off
= TREE_OPERAND (sub
, 1);
3398 addrtype
= TREE_TYPE (addr
);
3400 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
3401 if (TREE_CODE (addr
) == ADDR_EXPR
3402 && TREE_CODE (TREE_TYPE (addrtype
)) == VECTOR_TYPE
3403 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
)))
3404 && tree_fits_uhwi_p (off
))
3406 unsigned HOST_WIDE_INT offset
= tree_to_uhwi (off
);
3407 tree part_width
= TYPE_SIZE (type
);
3408 unsigned HOST_WIDE_INT part_widthi
3409 = tree_to_shwi (part_width
) / BITS_PER_UNIT
;
3410 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
3411 tree index
= bitsize_int (indexi
);
3412 if (offset
/ part_widthi
3413 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype
)))
3414 return fold_build3 (BIT_FIELD_REF
, type
, TREE_OPERAND (addr
, 0),
3418 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
3419 if (TREE_CODE (addr
) == ADDR_EXPR
3420 && TREE_CODE (TREE_TYPE (addrtype
)) == COMPLEX_TYPE
3421 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
))))
3423 tree size
= TYPE_SIZE_UNIT (type
);
3424 if (tree_int_cst_equal (size
, off
))
3425 return fold_build1 (IMAGPART_EXPR
, type
, TREE_OPERAND (addr
, 0));
3428 /* *(p + CST) -> MEM_REF <p, CST>. */
3429 if (TREE_CODE (addr
) != ADDR_EXPR
3430 || DECL_P (TREE_OPERAND (addr
, 0)))
3431 return fold_build2 (MEM_REF
, type
,
3433 build_int_cst_wide (ptype
,
3434 TREE_INT_CST_LOW (off
),
3435 TREE_INT_CST_HIGH (off
)));
3438 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3439 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
3440 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype
)))) == INTEGER_CST
3441 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (subtype
))))
3444 tree min_val
= size_zero_node
;
3446 sub
= gimple_fold_indirect_ref (sub
);
3448 sub
= build1 (INDIRECT_REF
, TREE_TYPE (subtype
), osub
);
3449 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
3450 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
3451 min_val
= TYPE_MIN_VALUE (type_domain
);
3452 if (TREE_CODE (min_val
) == INTEGER_CST
)
3453 return build4 (ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
, NULL_TREE
);