/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2013 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-ssa-propagate.h"
#include "gimple-fold.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node snode;

  if (DECL_ABSTRACT (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred to only if they were not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      snode = symtab_get_node (decl);
      if (!snode)
	return false;
      node = dyn_cast <cgraph_node> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || !DECL_EXTERNAL (from_decl)
      || (flag_ltrans
	  && symtab_get_node (from_decl)->symbol.in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && (!(snode = symtab_get_node (decl))
	  || !snode->symbol.in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception are COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also when still gimplifying all referred comdat functions will be
     produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are
     output in units where they are used, and when the other unit was
     compiled with LTO it is possible that the vtable was kept public
     while the function itself was privatized.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;

  /* OK, we are seeing either a COMDAT or a static variable.  In this case
     we must check that the definition is still around so we can refer
     to it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have the function body and that we didn't
	 take the decision to eliminate the offline copy of the function
	 yet.  The second is important when devirtualization happens
	 during the final compilation stage when making a new reference
	 no longer makes the callee to be compiled.  */
      if (!node || !node->symbol.definition || node->global.inlined_to)
	{
	  gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
	  return false;
	}
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->symbol.definition)
	{
	  gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
	  return false;
	}
    }
  return true;
}
/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
   it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_get_create_real_symbol_node (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
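
/* Illustrative example (a sketch, not from the original sources): an
   initializer value of the form

     &a p+ 4        (POINTER_PLUS_EXPR of an invariant address)

   is rewritten above into the equivalent

     &MEM[(void *) &a + 4B]

   form, which is_gimple_min_invariant accepts.  */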
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
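
/* Illustrative example (a sketch, not from the original sources): for

     static const int answer = 42;

   a use of 'answer' folds to the constant 42; a 'const' integral or
   floating-point variable lacking an initializer that cannot be
   overridden at link or run time folds to zero via build_zero_cst.  */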
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}
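
/* Illustrative example (a sketch, not from the original sources):
   propagation of a constant address can leave a memory reference like

     tmp_1 = MEM[(int *) &x, 0];

   which the MEM_REF case above folds back to the direct reference

     tmp_1 = x;

   provided the volatility and the types match as checked.  */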
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X
		 X == 1 and X != 0 is X  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));
	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
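
/* Illustrative example (a sketch, not from the original sources): for a
   boolean-typed operand _1, the canonicalization in the
   GIMPLE_BINARY_RHS case above rewrites

     _2 = _1 == 0;      into      _2 = ~_1;      (one-bit precision)
     _2 = _1 != 0;      into      _2 = _1;

   and uses _1 ^ 1 instead of ~_1 when the precision is wider.  */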
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
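
/* Illustrative example (a sketch, not from the original sources): after
   constant propagation a condition such as 'if (2 > 1)' folds in place
   to 'if (1)'; only the condition expression is rewritten, the cfg is
   left untouched.  */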
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;
  struct gimplify_ctx gctx;
  gimple laststore;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (&gctx);
  gctx.into_ssa = gimple_in_ssa_p (cfun);

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  laststore = NULL;
  for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
    {
      new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  reaching_vuse = gimple_vuse (stmt);
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
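
/* Usage sketch (illustrative, not from the original sources): callers
   typically try the cheap in-place replacement first and fall back to
   full gimplification of the replacement expression:

     if (!update_call_from_tree (gsi, result))
       gimplify_and_update_call_from_tree (gsi, result);
*/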
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  /* If we already have a length, it must match.  */
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_maxval_strlen (rhs, length, visited, type);
	  }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    tree op2 = gimple_assign_rhs2 (def_stmt);
	    tree op3 = gimple_assign_rhs3 (def_stmt);
	    return get_maxval_strlen (op2, length, visited, type)
		   && get_maxval_strlen (op3, length, visited, type);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }
	}
	return true;

      default:
	return false;
    }
}
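
/* Illustrative example (a sketch, not from the original sources): for

     p_1 = PHI <"foo"(2), "ba"(3)>

   a TYPE == 1 query yields the maximum string length 3, whereas a
   TYPE == 0 query fails because the two lengths differ.  */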
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	      fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2], ignore,
					   DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
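
/* Illustrative example (a sketch, not from the original sources): once
   get_maxval_strlen determines the source length, a call

     strcpy (dst, src)      where strlen (src) is known to be 3

   is simplified via fold_builtin_strcpy into the fixed-size

     memcpy (dst, src, 4)

   which can later be expanded inline.  */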
/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors).
   EXPECTED_TYPE is the type of the class the virtual method belongs to.  */

tree
gimple_extract_devirt_binfo_from_cst (tree cst, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, binfo;
  bool last_artificial = false;

  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      if (types_same_for_odr (type, expected_type))
	break;
      if (offset < 0)
	return NULL_TREE;

      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL)
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_low_cst (DECL_SIZE (fld), 1);
	  if (pos <= offset && (pos + size) > offset)
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      offset -= pos;
    }
  /* Artificial sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;

  return binfo;
}
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, cgraph_get_node (gimple_call_addr_fndecl
						(OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (virtual_method_call_p (callee))
	{
	  tree obj = OBJ_TYPE_REF_OBJECT (callee);
	  tree binfo = gimple_extract_devirt_binfo_from_cst
		 (obj, obj_type_ref_class (callee));
	  if (binfo)
	    {
	      HOST_WIDE_INT token
		= TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
	      tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
	      if (fndecl)
		{
#ifdef ENABLE_CHECKING
		  gcc_assert (possible_polymorphic_call_target_p
				 (callee, cgraph_get_node (fndecl)));
#endif
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		}
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  callee = gimple_call_fndecl (stmt);
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);
      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
      else if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
	changed |= targetm.gimple_fold_builtin (gsi);
    }

  return changed;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
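
/* Usage sketch (illustrative, not from the original sources): a pass can
   fold every statement of a basic block BB like so:

     gimple_stmt_iterator gsi;
     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
       if (fold_stmt (&gsi))
	 update_stmt (gsi_stmt (gsi));
*/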
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  else if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
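
/* Illustrative example (a sketch, not from the original sources): with
   INVERT set, an SSA name x_1 canonicalizes to the boolean comparison
   'x_1 == 0', and a comparison 'a_2 < b_3' canonicalizes to
   'a_2 >= b_3' via invert_tree_comparison.  */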
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }

  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
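
/* Illustrative examples (a sketch, not from the original sources) of
   pairs this simplifies without an intermediate SSA name:

     x < 5  AND x < 3      =>   x < 3     (more restrictive bound)
     x <= 2 AND x >= 2     =>   x == 2    (singleton range)
     x < 2  AND x > 4      =>   false     (disjoint ranges)
*/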
/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
2007 /* Try to simplify the OR of the ssa variable defined by the assignment
2008 STMT with the comparison specified by (OP2A CODE2 OP2B).
2009 Return NULL_EXPR if we can't simplify this to a single expression. */
2012 or_var_with_comparison_1 (gimple stmt
,
2013 enum tree_code code2
, tree op2a
, tree op2b
)
2015 tree var
= gimple_assign_lhs (stmt
);
2016 tree true_test_var
= NULL_TREE
;
2017 tree false_test_var
= NULL_TREE
;
2018 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
2020 /* Check for identities like (var OR (var != 0)) => true . */
2021 if (TREE_CODE (op2a
) == SSA_NAME
2022 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
2024 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
2025 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
2027 true_test_var
= op2a
;
2028 if (var
== true_test_var
)
2031 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
2032 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
2034 false_test_var
= op2a
;
2035 if (var
== false_test_var
)
2036 return boolean_true_node
;
2040 /* If the definition is a comparison, recurse on it. */
2041 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
2043 tree t
= or_comparisons_1 (innercode
,
2044 gimple_assign_rhs1 (stmt
),
2045 gimple_assign_rhs2 (stmt
),
2053 /* If the definition is an AND or OR expression, we may be able to
2054 simplify by reassociating. */
2055 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
2056 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
2058 tree inner1
= gimple_assign_rhs1 (stmt
);
2059 tree inner2
= gimple_assign_rhs2 (stmt
);
2062 tree partial
= NULL_TREE
;
2063 bool is_or
= (innercode
== BIT_IOR_EXPR
);
2065 /* Check for boolean identities that don't require recursive examination
2067 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2068 inner1 OR (inner1 AND inner2) => inner1
2069 !inner1 OR (inner1 OR inner2) => true
2070 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2072 if (inner1
== true_test_var
)
2073 return (is_or
? var
: inner1
);
2074 else if (inner2
== true_test_var
)
2075 return (is_or
? var
: inner2
);
2076 else if (inner1
== false_test_var
)
2079 : or_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
2080 else if (inner2
== false_test_var
)
2083 : or_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
      /* Next, redistribute/reassociate the OR across the inner tests.
         Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
        {
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             => (t OR inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_or)
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (integer_zerop (t))
                return inner2;
            }

          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner2 OR (op2a code op2b)))  */
          else if (integer_zerop (t))
            return boolean_false_node;

          /* Save partial result for later.  */
          partial = t;
        }
      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
        {
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             => (inner1 OR t)
             => (t OR partial)  */
          if (is_or)
            {
              if (integer_zerop (t))
                return inner1;
              else if (integer_onep (t))
                return boolean_true_node;
              /* If both are the same, we can apply the identity
                 (x OR x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner1 OR (op2a code2 op2b)))
             => (t AND partial)  */
          else
            {
              if (integer_zerop (t))
                return boolean_false_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed AND expression.  The
                     interesting case is when at least one is true.
                     Or, if both are the same, we can apply the identity
                     (x AND x) == x.  */
                  if (integer_onep (partial))
                    return t;
                  else if (integer_onep (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
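/* A worked example of the reassociation above (illustrative only; names
   made up): given
     inner1_1 = x_4 < 4;
     var_2 = inner1_1 | inner2_3;
   ORing var_2 with (x_4 >= 4) first computes the partial result
     inner1_1 | (x_4 >= 4)  ==>  (x_4 < 4) | (x_4 >= 4)  ==>  true,
   so the whole expression folds to boolean_true_node regardless of
   inner2_3.  */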
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1, code2,
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1,
                                    swap_tree_comparison (code2),
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }
  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
         return that or TRUE, depending on whether the constant op1b
         also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code1, boolean_type_node, op1a, op1b);
            }
        }
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
            }
        }
      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      else if (code2 == EQ_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }
      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }
      /* Check for singleton ranges.  */
      else if (cmp == 0
               && ((code1 == LT_EXPR && code2 == GT_EXPR)
                   || (code1 == GT_EXPR && code2 == LT_EXPR)))
        return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_true_node;
      else if (cmp <= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_true_node;
    }
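  /* A few concrete instances of the constant merging above (illustrative):
       (x != 3) OR (x < 5)   ==>  true      (3 satisfies x < 5)
       (x == 3) OR (x <= 5)  ==>  x <= 5    (3 satisfies x <= 5)
       (x < 3) OR (x < 5)    ==>  x < 5     (less restrictive bound)
       (x < 3) OR (x > 3)    ==>  x != 3    (singleton range)
       (x <= 3) OR (x >= 2)  ==>  true      (no range restriction)  */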
  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ORed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     we're still OK.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
                        {
                          if (!result)
                            result = boolean_true_node;
                          else if (!integer_onep (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME
                           && !SSA_NAME_IS_DEFAULT_DEF (arg))
                    {
                      tree temp;
                      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
                      /* In simple cases we can look through PHI nodes,
                         but we have to be careful with loops.  */
                      if (! dom_info_available_p (CDI_DOMINATORS)
                          || gimple_bb (def_stmt) == gimple_bb (stmt)
                          || dominated_by_p (CDI_DOMINATORS,
                                             gimple_bb (def_stmt),
                                             gimple_bb (stmt)))
                        return NULL_TREE;
                      temp = or_var_with_comparison (arg, invert, code2,
                                                     op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }
          break;

        default:
          break;
        }
    }
  return NULL_TREE;
}
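/* Illustrative PHI case for the code above (names made up): given a
   boolean
     a_1 = PHI <1(bb2), b_3(bb3)>
   the disjunction (a_1 != 0) OR (x_4 < 7) folds to boolean_true_node
   only if ORing b_3 with (x_4 < 7) also simplifies to true, since every
   PHI argument must produce the same result.  */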
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
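/* A minimal caller sketch (illustrative only; X, TEN and TWENTY stand for
   the operand trees of two comparisons x < 10 and x > 20 feeding a
   disjunction):

     tree folded = maybe_fold_or_comparisons (LT_EXPR, x, ten,
                                              GT_EXPR, x, twenty);
     if (folded)
       ... replace the BIT_IOR_EXPR with FOLDED ...

   A non-NULL result is always of boolean type.  */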
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Returns either NULL_TREE, a simplified but non-constant tree,
   or a constant tree.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant
                     value, if any.  */
                  return (*valueize) (rhs);
                }
              /* Handle propagating invariant addresses into address
                 operations.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR
                       && !is_gimple_min_invariant (rhs))
                {
                  HOST_WIDE_INT offset = 0;
                  tree base;
                  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
                                                          &offset,
                                                          valueize);
                  if (base
                      && (CONSTANT_CLASS_P (base)
                          || decl_address_invariant_p (base)))
                    return build_invariant_address (TREE_TYPE (rhs),
                                                    base, offset);
                }
              else if (TREE_CODE (rhs) == CONSTRUCTOR
                       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                       && (CONSTRUCTOR_NELTS (rhs)
                           == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
                {
                  unsigned i;
                  tree val, *vec;

                  vec = XALLOCAVEC (tree,
                                    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
                    {
                      val = (*valueize) (val);
                      if (TREE_CODE (val) == INTEGER_CST
                          || TREE_CODE (val) == REAL_CST
                          || TREE_CODE (val) == FIXED_CST)
                        vec[i] = val;
                      else
                        return NULL_TREE;
                    }

                  return build_vector (TREE_TYPE (rhs), vec);
                }
              if (kind == tcc_reference)
                {
                  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                       || TREE_CODE (rhs) == REALPART_EXPR
                       || TREE_CODE (rhs) == IMAGPART_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_unary_loc (EXPR_LOCATION (rhs),
                                             TREE_CODE (rhs),
                                             TREE_TYPE (rhs), val);
                    }
                  else if (TREE_CODE (rhs) == BIT_FIELD_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_ternary_loc (EXPR_LOCATION (rhs),
                                               TREE_CODE (rhs),
                                               TREE_TYPE (rhs), val,
                                               TREE_OPERAND (rhs, 1),
                                               TREE_OPERAND (rhs, 2));
                    }
                  else if (TREE_CODE (rhs) == MEM_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      if (TREE_CODE (val) == ADDR_EXPR
                          && is_gimple_min_invariant (val))
                        {
                          tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
                                                  unshare_expr (val),
                                                  TREE_OPERAND (rhs, 1));
                          if (tem)
                            rhs = tem;
                        }
                    }
                  return fold_const_aggregate_ref_1 (rhs, valueize);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }
          case GIMPLE_UNARY_RHS:
            {
              /* Handle unary operators that can appear in GIMPLE form.
                 Note that we know the single operand must be a constant,
                 so this should almost always return a simplified RHS.  */
              tree lhs = gimple_assign_lhs (stmt);
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));

              /* Conversions are useless for CCP purposes if they are
                 value-preserving.  Thus the restrictions that
                 useless_type_conversion_p places for restrict qualification
                 of pointer types should not apply here.
                 Substitution later will only substitute to allowed places.  */
              if (CONVERT_EXPR_CODE_P (subcode)
                  && POINTER_TYPE_P (TREE_TYPE (lhs))
                  && POINTER_TYPE_P (TREE_TYPE (op0))
                  && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
                     == TYPE_ADDR_SPACE (TREE_TYPE (op0))
                  && TYPE_MODE (TREE_TYPE (lhs))
                     == TYPE_MODE (TREE_TYPE (op0)))
                return op0;

              return
                fold_unary_ignore_overflow_loc (loc, subcode,
                                                gimple_expr_type (stmt), op0);
            }
          case GIMPLE_BINARY_RHS:
            {
              /* Handle binary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

              /* Translate &x + CST into an invariant form suitable for
                 further propagation.  */
              if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                  && TREE_CODE (op0) == ADDR_EXPR
                  && TREE_CODE (op1) == INTEGER_CST)
                {
                  tree off = fold_convert (ptr_type_node, op1);
                  return build_fold_addr_expr_loc
                           (loc,
                            fold_build2 (MEM_REF,
                                         TREE_TYPE (TREE_TYPE (op0)),
                                         unshare_expr (op0), off));
                }

              return fold_binary_loc (loc, subcode,
                                      gimple_expr_type (stmt), op0, op1);
            }
          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

              /* Fold embedded expressions in ternary codes.  */
              if ((subcode == COND_EXPR
                   || subcode == VEC_COND_EXPR)
                  && COMPARISON_CLASS_P (op0))
                {
                  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
                  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
                  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
                                              TREE_TYPE (op0), op00, op01);
                  if (tem)
                    op0 = tem;
                }

              return fold_ternary_loc (loc, subcode,
                                       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }
    case GIMPLE_CALL:
      {
        tree fn;

        if (gimple_call_internal_p (stmt))
          /* No folding yet for these functions.  */
          return NULL_TREE;

        fn = (*valueize) (gimple_call_fn (stmt));
        if (TREE_CODE (fn) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
          {
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree call, retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              args[i] = (*valueize) (gimple_call_arg (stmt, i));
            call = build_call_array_loc (loc,
                                         gimple_call_return_type (stmt),
                                         fn, gimple_call_num_args (stmt), args);
            retval = fold_call_expr (EXPR_LOCATION (call), call, false);
            if (retval)
              /* fold_call_expr wraps the result inside a NOP_EXPR.  */
              STRIP_NOPS (retval);
            return retval;
          }
        return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
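/* A minimal valueize callback sketch (illustrative only;
   lookup_lattice_value is a hypothetical client-side lookup).  CCP-style
   clients return the known constant for an SSA name, or the name itself
   when nothing better is known:

     static tree
     example_valueize (tree name)
     {
       tree cst = lookup_lattice_value (name);   (hypothetical lookup)
       return cst ? cst : name;
     }

   gimple_fold_stmt_to_constant (stmt, example_valueize) then yields a
   constant only when the folded result satisfies
   is_gimple_min_invariant.  */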
/* The following set of functions fold references using their
   constant initializers.  */

static tree fold_ctor_reference (tree type, tree ctor,
                                 unsigned HOST_WIDE_INT offset,
                                 unsigned HOST_WIDE_INT size, tree);
/* See if we can find a constructor defining the value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
                      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
        {
          if (!host_integerp (TREE_OPERAND (base, 1), 0))
            return NULL_TREE;
          *bit_offset += (mem_ref_offset (base).low
                          * BITS_PER_UNIT);
        }

      if (valueize
          && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
        base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
        tree init = ctor_for_folding (base);

        /* Our semantic is exact opposite of ctor_for_folding;
           NULL means unknown, while error_mark_node is 0.  */
        if (init == error_mark_node)
          return NULL_TREE;
        if (!init)
          return error_mark_node;
        return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
        return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
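/* For example (illustrative): for
     static const int a;
   there is no explicit CONSTRUCTOR but the value is known to be zero,
   so error_mark_node is returned; for an extern declaration whose
   initializer is not visible, NULL_TREE (unknown) is returned instead.  */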
/* CTOR is STRING_CST.  Fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.

   We only do the simple job of folding byte accesses.  */

static tree
fold_string_cst_ctor_reference (tree type, tree ctor,
                                unsigned HOST_WIDE_INT offset,
                                unsigned HOST_WIDE_INT size)
{
  if (INTEGRAL_TYPE_P (type)
      && (TYPE_MODE (type)
          == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
          == MODE_INT)
      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
      && size == BITS_PER_UNIT
      && !(offset % BITS_PER_UNIT))
    {
      offset /= BITS_PER_UNIT;
      if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
        return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
                                          [offset]));
      /* Folding
           const char a[20]="hello";
           return a[16];

         might lead to offset greater than string length.  In this case we
         know value is either initialized to 0 or out of bounds.  Return 0
         in both cases.  */
      return build_zero_cst (type);
    }
  return NULL_TREE;
}
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
                           unsigned HOST_WIDE_INT offset,
                           unsigned HOST_WIDE_INT size,
                           tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  double_int low_bound, elt_size;
  double_int index, max_index;
  double_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = double_int_zero;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
              == INTEGER_CST);
  elt_size =
    tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known not to
     be larger than the size of an array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type))))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
                 .udiv (elt_size, TRUNC_DIV_EXPR);
  access_index += low_bound;
  if (index_type)
    access_index = access_index.ext (TYPE_PRECISION (index_type),
                                     TYPE_UNSIGNED (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  index = low_bound - double_int_one;
  if (index_type)
    index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify a range,
         or leave index NULL meaning that it is next index after previous
         one.  */
      if (cfield)
        {
          if (TREE_CODE (cfield) == INTEGER_CST)
            max_index = index = tree_to_double_int (cfield);
          else
            {
              gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
              index = tree_to_double_int (TREE_OPERAND (cfield, 0));
              max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
            }
        }
      else
        {
          index += double_int_one;
          if (index_type)
            index = index.ext (TYPE_PRECISION (index_type),
                               TYPE_UNSIGNED (index_type));
          max_index = index;
        }

      /* Do we have match?  */
      if (access_index.cmp (index, 1) >= 0
          && access_index.cmp (max_index, 1) <= 0)
        return fold_ctor_reference (type, cval, inner_offset, size,
                                    from_decl);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
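/* A worked example (illustrative, assuming 32-bit int): for
     static const int a[4] = {10, 20, 30, 40};
   a read of type int with SIZE == 32 and OFFSET == 64 computes
   access_index == 2 and inner_offset == 0, and folds to 30.  */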
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
                              unsigned HOST_WIDE_INT offset,
                              unsigned HOST_WIDE_INT size,
                              tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
                            cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      double_int bitoffset;
      double_int byte_offset_cst = tree_to_double_int (byte_offset);
      double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
      double_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
         but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
                  && TREE_CODE (byte_offset) == INTEGER_CST
                  && (field_size != NULL_TREE
                      ? TREE_CODE (field_size) == INTEGER_CST
                      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = tree_to_double_int (field_offset)
                  + byte_offset_cst * bits_per_unit_cst;
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
        bitoffset_end = bitoffset + tree_to_double_int (field_size);
      else
        bitoffset_end = double_int_zero;

      access_end = double_int::from_uhwi (offset)
                   + double_int::from_uhwi (size);

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
         [BITOFFSET, BITOFFSET_END)?  */
      if (access_end.cmp (bitoffset, 0) > 0
          && (field_size == NULL_TREE
              || double_int::from_uhwi (offset).slt (bitoffset_end)))
        {
          double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
          /* We do have overlap.  Now see if the field is large enough to
             cover the access.  Give up for accesses spanning multiple
             fields.  */
          if (access_end.cmp (bitoffset_end, 0) > 0)
            return NULL_TREE;
          if (double_int::from_uhwi (offset).slt (bitoffset))
            return NULL_TREE;
          return fold_ctor_reference (type, cval,
                                      inner_offset.to_uhwi (), size,
                                      from_decl);
        }
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
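/* A worked example (illustrative, assuming 32-bit int): for
     static const struct { int x; int y; } s = {1, 2};
   a read of type int with OFFSET == 32 and SIZE == 32 overlaps only
   the field Y, is fully covered by it, and folds to 2.  */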
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  */

static tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
                     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && operand_equal_p (TYPE_SIZE (type),
                          TYPE_SIZE (TREE_TYPE (ctor)), 0))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
        STRIP_NOPS (ret);
      return ret;
    }
  if (TREE_CODE (ctor) == STRING_CST)
    return fold_string_cst_ctor_reference (type, ctor, offset, size);
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
          || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
        return fold_array_ctor_reference (type, ctor, offset, size,
                                          from_decl);
      else
        return fold_nonarray_ctor_reference (type, ctor, offset, size,
                                             from_decl);
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

static tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
         Only special case variable offsets.
         FIXME: This code can't handle nested references with variable indexes
         (they will be handled only by iteration of ccp).  Perhaps we can bring
         get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
          && valueize
          && (idx = (*valueize) (TREE_OPERAND (t, 1)))
          && TREE_CODE (idx) == INTEGER_CST)
        {
          tree low_bound, unit_size;
          double_int doffset;

          /* If the resulting bit-offset is constant, track it.  */
          if ((low_bound = array_ref_low_bound (t),
               TREE_CODE (low_bound) == INTEGER_CST)
              && (unit_size = array_ref_element_size (t),
                  host_integerp (unit_size, 1))
              && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
                            .sext (TYPE_PRECISION (TREE_TYPE (idx))),
                  doffset.fits_shwi ()))
            {
              offset = doffset.to_shwi ();
              offset *= TREE_INT_CST_LOW (unit_size);
              offset *= BITS_PER_UNIT;

              base = TREE_OPERAND (t, 0);
              ctor = get_base_constructor (base, &offset, valueize);
              /* Empty constructor.  Always fold to 0.  */
              if (ctor == error_mark_node)
                return build_zero_cst (TREE_TYPE (t));
              /* Out of bound array access.  Value is undefined,
                 but don't fold.  */
              if (offset < 0)
                return NULL_TREE;
              /* We can not determine ctor.  */
              if (!ctor)
                return NULL_TREE;
              return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
                                          TREE_INT_CST_LOW (unit_size)
                                          * BITS_PER_UNIT,
                                          base);
            }
        }
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
        return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
        return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
        return NULL_TREE;
      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
        return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
                                  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1_loc (EXPR_LOCATION (t),
                                  TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
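/* For example (illustrative): given
     static const int a[2] = {7, 9};
   passing the ARRAY_REF a[1] to fold_const_aggregate_ref yields the
   INTEGER_CST 9, while a reference into a variable whose initializer
   is not available folds to NULL_TREE.  */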
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
{
  unsigned HOST_WIDE_INT offset, size;
  tree v, fn, vtable, init;

  vtable = v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual method table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (TREE_CODE (v) == POINTER_PLUS_EXPR)
    {
      offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
      v = TREE_OPERAND (v, 0);
    }
  else
    offset = 0;

  if (TREE_CODE (v) != ADDR_EXPR)
    return NULL_TREE;
  v = TREE_OPERAND (v, 0);

  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    return NULL_TREE;
  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we should always assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that the ctor seems unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
  offset += token * size;
  fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
                            offset, size, v);
  if (!fn || integer_zerop (fn))
    return NULL_TREE;
  gcc_assert (TREE_CODE (fn) == ADDR_EXPR
              || TREE_CODE (fn) == FDESC_EXPR);
  fn = TREE_OPERAND (fn, 0);
  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);

  /* When cgraph node is missing and function is not public, we cannot
     devirtualize.  This can happen in WHOPR when the actual method
     ends up in other partition, because we found devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (fn, vtable))
    return NULL_TREE;

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_get_create_node (fn);

  return fn;
}
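/* An illustrative C++ case for the lookup above:
     struct A { virtual int f (); };
   with KNOWN_BINFO describing a complete object of type A and TOKEN 0,
   the reference folds through A's vtable initializer to the
   FUNCTION_DECL of A::f, provided the current unit may refer to it.  */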
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
         cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
        {
        case ABS_EXPR:
          /* Always true for floating-point operands.  */
          return true;

        case MULT_EXPR:
          /* True if the two operands are identical (since we are
             restricted to floating-point inputs).  */
          op0 = gimple_assign_rhs1 (def_stmt);
          op1 = gimple_assign_rhs2 (def_stmt);

          if (op0 == op1
              || operand_equal_p (op0, op1, 0))
            return true;

        default:
          break;
        }
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
          && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
        {
          tree arg1;

          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_ACOS):
            CASE_FLT_FN (BUILT_IN_ACOSH):
            CASE_FLT_FN (BUILT_IN_CABS):
            CASE_FLT_FN (BUILT_IN_COSH):
            CASE_FLT_FN (BUILT_IN_ERFC):
            CASE_FLT_FN (BUILT_IN_EXP):
            CASE_FLT_FN (BUILT_IN_EXP10):
            CASE_FLT_FN (BUILT_IN_EXP2):
            CASE_FLT_FN (BUILT_IN_FABS):
            CASE_FLT_FN (BUILT_IN_FDIM):
            CASE_FLT_FN (BUILT_IN_HYPOT):
            CASE_FLT_FN (BUILT_IN_POW10):
              return true;

            CASE_FLT_FN (BUILT_IN_SQRT):
              /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
                 nonnegative inputs.  */
              if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
                return true;

              break;

            CASE_FLT_FN (BUILT_IN_POWI):
              /* True if the second argument is an even integer.  */
              arg1 = gimple_call_arg (def_stmt, 1);

              if (TREE_CODE (arg1) == INTEGER_CST
                  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
                return true;

              break;

            CASE_FLT_FN (BUILT_IN_POW):
              /* True if the second argument is an even integer-valued
                 real.  */
              arg1 = gimple_call_arg (def_stmt, 1);

              if (TREE_CODE (arg1) == REAL_CST)
                {
                  REAL_VALUE_TYPE c;
                  HOST_WIDE_INT n;

                  c = TREE_REAL_CST (arg1);
                  n = real_to_integer (&c);

                  if ((n & 1) == 0)
                    {
                      REAL_VALUE_TYPE cint;
                      real_from_integer (&cint, VOIDmode, n,
                                         n < 0 ? -1 : 0, 0);
                      if (real_identical (&c, &cint))