1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2013 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
30 #include "tree-ssa-propagate.h"
32 #include "gimple-fold.h"
34 /* Return true when DECL can be referenced from current unit.
35 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
36 We can get declarations that are not possible to reference for various
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
41 when they are keyed to other compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
45 2) In WHOPR mode devirtualization might lead to reference
46 to method that was partitioned elsewhere.
47 In this case we have static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
50 3) COMDAT functions referred by external vtables that
51 we devirtualize only during final compilation stage.
52 At this time we already decided that we will not output
53 the function body and thus we can't reference the symbol
/* NOTE(review): this extraction is incomplete -- the embedded upstream line
   numbers jump (e.g. 53 -> 57, 63 -> 66), so braces and return statements are
   missing here.  Restore from upstream gimple-fold.c before compiling. */
57 can_refer_decl_in_current_unit_p (tree decl
, tree from_decl
)
59 struct varpool_node
*vnode
;
60 struct cgraph_node
*node
;
63 if (DECL_ABSTRACT (decl
))
66 /* We are concerned only about static/external vars and functions. */
67 if ((!TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
))
68 || (TREE_CODE (decl
) != VAR_DECL
&& TREE_CODE (decl
) != FUNCTION_DECL
))
71 /* Static objects can be referred only if they were not optimized out yet. */
72 if (!TREE_PUBLIC (decl
) && !DECL_EXTERNAL (decl
))
74 snode
= symtab_get_node (decl
);
77 node
= dyn_cast
<cgraph_node
> (snode
);
78 return !node
|| !node
->global
.inlined_to
;
81 /* We will later output the initializer, so we can refer to it.
82 So we are concerned only when DECL comes from initializer of
85 || TREE_CODE (from_decl
) != VAR_DECL
86 || !DECL_EXTERNAL (from_decl
)
88 && symtab_get_node (from_decl
)->symbol
.in_other_partition
))
90 /* We are folding reference from external vtable. The vtable may refer
91 to a symbol keyed to other compilation unit. The other compilation
92 unit may be in separate DSO and the symbol may be hidden. */
93 if (DECL_VISIBILITY_SPECIFIED (decl
)
94 && DECL_EXTERNAL (decl
)
95 && (!(snode
= symtab_get_node (decl
)) || !snode
->symbol
.in_other_partition
))
97 /* When function is public, we always can introduce new reference.
98 Exception are the COMDAT functions where introducing a direct
99 reference imply need to include function body in the current unit. */
100 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
102 /* We are not at ltrans stage; so don't worry about WHOPR.
103 Also when still gimplifying all referred comdat functions will be
106 As observed in PR20991 for already optimized out comdat virtual functions
107 it may be tempting to not necessarily give up because the copy will be
108 output elsewhere when corresponding vtable is output.
109 This is however not possible - ABI specify that COMDATs are output in
110 units where they are used and when the other unit was compiled with LTO
111 it is possible that vtable was kept public while the function itself
113 if (!flag_ltrans
&& (!DECL_COMDAT (decl
) || !cgraph_function_flags_ready
))
116 /* OK we are seeing either COMDAT or static variable. In this case we must
117 check that the definition is still around so we can refer it. */
118 if (TREE_CODE (decl
) == FUNCTION_DECL
)
120 node
= cgraph_get_node (decl
);
121 /* Check that we still have function body and that we didn't take
122 the decision to eliminate offline copy of the function yet.
123 The second is important when devirtualization happens during final
124 compilation stage when making a new reference no longer makes callee
126 if (!node
|| !node
->symbol
.definition
|| node
->global
.inlined_to
)
128 gcc_checking_assert (!TREE_ASM_WRITTEN (decl
));
132 else if (TREE_CODE (decl
) == VAR_DECL
)
134 vnode
= varpool_get_node (decl
);
135 if (!vnode
|| !vnode
->symbol
.definition
)
137 gcc_checking_assert (!TREE_ASM_WRITTEN (decl
));
144 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
145 acceptable form for is_gimple_min_invariant.
146 FROM_DECL (if non-NULL) specifies the variable whose constructor contains CVAL. */
/* NOTE(review): extraction is incomplete -- the embedded upstream line numbers
   jump (e.g. 146 -> 149, 160 -> 162), so statements and braces are missing;
   verify against upstream gimple-fold.c. */
149 canonicalize_constructor_val (tree cval
, tree from_decl
)
151 tree orig_cval
= cval
;
153 if (TREE_CODE (cval
) == POINTER_PLUS_EXPR
154 && TREE_CODE (TREE_OPERAND (cval
, 1)) == INTEGER_CST
)
156 tree ptr
= TREE_OPERAND (cval
, 0);
157 if (is_gimple_min_invariant (ptr
))
158 cval
= build1_loc (EXPR_LOCATION (cval
),
159 ADDR_EXPR
, TREE_TYPE (ptr
),
160 fold_build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (ptr
)),
162 fold_convert (ptr_type_node
,
163 TREE_OPERAND (cval
, 1))));
165 if (TREE_CODE (cval
) == ADDR_EXPR
)
167 tree base
= NULL_TREE
;
168 if (TREE_CODE (TREE_OPERAND (cval
, 0)) == COMPOUND_LITERAL_EXPR
)
170 base
= COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval
, 0));
172 TREE_OPERAND (cval
, 0) = base
;
175 base
= get_base_address (TREE_OPERAND (cval
, 0));
179 if ((TREE_CODE (base
) == VAR_DECL
180 || TREE_CODE (base
) == FUNCTION_DECL
)
181 && !can_refer_decl_in_current_unit_p (base
, from_decl
))
183 if (TREE_CODE (base
) == VAR_DECL
)
184 TREE_ADDRESSABLE (base
) = 1;
185 else if (TREE_CODE (base
) == FUNCTION_DECL
)
187 /* Make sure we create a cgraph node for functions we'll reference.
188 They can be non-existent if the reference comes from an entry
189 of an external vtable for example. */
190 cgraph_get_create_real_symbol_node (base
);
192 /* Fixup types in global initializers. */
193 if (TREE_TYPE (TREE_TYPE (cval
)) != TREE_TYPE (TREE_OPERAND (cval
, 0)))
194 cval
= build_fold_addr_expr (TREE_OPERAND (cval
, 0));
196 if (!useless_type_conversion_p (TREE_TYPE (orig_cval
), TREE_TYPE (cval
)))
197 cval
= fold_convert (TREE_TYPE (orig_cval
), cval
);
203 /* If SYM is a constant variable with known value, return the value.
204 NULL_TREE is returned otherwise. */
/* NOTE(review): extraction is incomplete -- the embedded upstream line numbers
   jump (e.g. 210 -> 214, 215 -> 220), so statements and braces are missing;
   verify against upstream gimple-fold.c. */
207 get_symbol_constant_value (tree sym
)
209 tree val
= ctor_for_folding (sym
);
210 if (val
!= error_mark_node
)
214 val
= canonicalize_constructor_val (unshare_expr (val
), sym
);
215 if (val
&& is_gimple_min_invariant (val
))
220 /* Variables declared 'const' without an initializer
221 have zero as the initializer if they may not be
222 overridden at link or run time. */
224 && (INTEGRAL_TYPE_P (TREE_TYPE (sym
))
225 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym
))))
226 return build_zero_cst (TREE_TYPE (sym
));
234 /* Subroutine of fold_stmt. We perform several simplifications of the
235 memory reference tree EXPR and make sure to re-gimplify them properly
236 after propagation of constant addresses. IS_LHS is true if the
237 reference is supposed to be an lvalue. */
/* NOTE(review): extraction is incomplete -- the embedded upstream line numbers
   jump (e.g. 249 -> 252, 278 -> 286), so statements and braces (including the
   declaration of T) are missing; verify against upstream gimple-fold.c. */
240 maybe_fold_reference (tree expr
, bool is_lhs
)
245 if ((TREE_CODE (expr
) == VIEW_CONVERT_EXPR
246 || TREE_CODE (expr
) == REALPART_EXPR
247 || TREE_CODE (expr
) == IMAGPART_EXPR
)
248 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
249 return fold_unary_loc (EXPR_LOCATION (expr
),
252 TREE_OPERAND (expr
, 0));
253 else if (TREE_CODE (expr
) == BIT_FIELD_REF
254 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
255 return fold_ternary_loc (EXPR_LOCATION (expr
),
258 TREE_OPERAND (expr
, 0),
259 TREE_OPERAND (expr
, 1),
260 TREE_OPERAND (expr
, 2));
262 while (handled_component_p (*t
))
263 t
= &TREE_OPERAND (*t
, 0);
265 /* Canonicalize MEM_REFs invariant address operand. Do this first
266 to avoid feeding non-canonical MEM_REFs elsewhere. */
267 if (TREE_CODE (*t
) == MEM_REF
268 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t
, 0)))
270 bool volatile_p
= TREE_THIS_VOLATILE (*t
);
271 tree tem
= fold_binary (MEM_REF
, TREE_TYPE (*t
),
272 TREE_OPERAND (*t
, 0),
273 TREE_OPERAND (*t
, 1));
276 TREE_THIS_VOLATILE (tem
) = volatile_p
;
278 tem
= maybe_fold_reference (expr
, is_lhs
);
286 && (result
= fold_const_aggregate_ref (expr
))
287 && is_gimple_min_invariant (result
))
290 /* Fold back MEM_REFs to reference trees. */
291 if (TREE_CODE (*t
) == MEM_REF
292 && TREE_CODE (TREE_OPERAND (*t
, 0)) == ADDR_EXPR
293 && integer_zerop (TREE_OPERAND (*t
, 1))
294 && (TREE_THIS_VOLATILE (*t
)
295 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0)))
296 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t
, 1)))
297 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t
))
298 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t
, 1)))))
299 /* We have to look out here to not drop a required conversion
300 from the rhs to the lhs if is_lhs, but we don't have the
301 rhs here to verify that. Thus require strict type
303 && types_compatible_p (TREE_TYPE (*t
),
304 TREE_TYPE (TREE_OPERAND
305 (TREE_OPERAND (*t
, 0), 0))))
308 *t
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
309 tem
= maybe_fold_reference (expr
, is_lhs
);
314 else if (TREE_CODE (*t
) == TARGET_MEM_REF
)
316 tree tem
= maybe_fold_tmr (*t
);
320 tem
= maybe_fold_reference (expr
, is_lhs
);
331 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
332 replacement rhs for the statement or NULL_TREE if no simplification
333 could be made. It is assumed that the operands have been previously
/* NOTE(review): extraction is incomplete -- the embedded upstream line numbers
   jump (e.g. 333 -> 337, 459 -> 461), so statements, braces and break/return
   lines are missing; verify against upstream gimple-fold.c. */
337 fold_gimple_assign (gimple_stmt_iterator
*si
)
339 gimple stmt
= gsi_stmt (*si
);
340 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
341 location_t loc
= gimple_location (stmt
);
343 tree result
= NULL_TREE
;
345 switch (get_gimple_rhs_class (subcode
))
347 case GIMPLE_SINGLE_RHS
:
349 tree rhs
= gimple_assign_rhs1 (stmt
);
351 if (REFERENCE_CLASS_P (rhs
))
352 return maybe_fold_reference (rhs
, false);
354 else if (TREE_CODE (rhs
) == ADDR_EXPR
)
356 tree ref
= TREE_OPERAND (rhs
, 0);
357 tree tem
= maybe_fold_reference (ref
, true);
359 && TREE_CODE (tem
) == MEM_REF
360 && integer_zerop (TREE_OPERAND (tem
, 1)))
361 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (tem
, 0));
363 result
= fold_convert (TREE_TYPE (rhs
),
364 build_fold_addr_expr_loc (loc
, tem
));
365 else if (TREE_CODE (ref
) == MEM_REF
366 && integer_zerop (TREE_OPERAND (ref
, 1)))
367 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (ref
, 0));
370 else if (TREE_CODE (rhs
) == CONSTRUCTOR
371 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
372 && (CONSTRUCTOR_NELTS (rhs
)
373 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
375 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
379 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
380 if (TREE_CODE (val
) != INTEGER_CST
381 && TREE_CODE (val
) != REAL_CST
382 && TREE_CODE (val
) != FIXED_CST
)
385 return build_vector_from_ctor (TREE_TYPE (rhs
),
386 CONSTRUCTOR_ELTS (rhs
));
389 else if (DECL_P (rhs
))
390 return get_symbol_constant_value (rhs
);
392 /* If we couldn't fold the RHS, hand over to the generic
394 if (result
== NULL_TREE
)
397 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
398 that may have been added by fold, and "useless" type
399 conversions that might now be apparent due to propagation. */
400 STRIP_USELESS_TYPE_CONVERSION (result
);
402 if (result
!= rhs
&& valid_gimple_rhs_p (result
))
409 case GIMPLE_UNARY_RHS
:
411 tree rhs
= gimple_assign_rhs1 (stmt
);
413 result
= fold_unary_loc (loc
, subcode
, gimple_expr_type (stmt
), rhs
);
416 /* If the operation was a conversion do _not_ mark a
417 resulting constant with TREE_OVERFLOW if the original
418 constant was not. These conversions have implementation
419 defined behavior and retaining the TREE_OVERFLOW flag
420 here would confuse later passes such as VRP. */
421 if (CONVERT_EXPR_CODE_P (subcode
)
422 && TREE_CODE (result
) == INTEGER_CST
423 && TREE_CODE (rhs
) == INTEGER_CST
)
424 TREE_OVERFLOW (result
) = TREE_OVERFLOW (rhs
);
426 STRIP_USELESS_TYPE_CONVERSION (result
);
427 if (valid_gimple_rhs_p (result
))
433 case GIMPLE_BINARY_RHS
:
434 /* Try to canonicalize for boolean-typed X the comparisons
435 X == 0, X == 1, X != 0, and X != 1. */
436 if (gimple_assign_rhs_code (stmt
) == EQ_EXPR
437 || gimple_assign_rhs_code (stmt
) == NE_EXPR
)
439 tree lhs
= gimple_assign_lhs (stmt
);
440 tree op1
= gimple_assign_rhs1 (stmt
);
441 tree op2
= gimple_assign_rhs2 (stmt
);
442 tree type
= TREE_TYPE (op1
);
444 /* Check whether the comparison operands are of the same boolean
445 type as the result type is.
446 Check that second operand is an integer-constant with value
448 if (TREE_CODE (op2
) == INTEGER_CST
449 && (integer_zerop (op2
) || integer_onep (op2
))
450 && useless_type_conversion_p (TREE_TYPE (lhs
), type
))
452 enum tree_code cmp_code
= gimple_assign_rhs_code (stmt
);
453 bool is_logical_not
= false;
455 /* X == 0 and X != 1 is a logical-not of X
456 X == 1 and X != 0 is X */
457 if ((cmp_code
== EQ_EXPR
&& integer_zerop (op2
))
458 || (cmp_code
== NE_EXPR
&& integer_onep (op2
)))
459 is_logical_not
= true;
461 if (is_logical_not
== false)
463 /* Only for one-bit precision typed X the transformation
464 !X -> ~X is valid. */
465 else if (TYPE_PRECISION (type
) == 1)
466 result
= build1_loc (gimple_location (stmt
), BIT_NOT_EXPR
,
468 /* Otherwise we use !X -> X ^ 1. */
470 result
= build2_loc (gimple_location (stmt
), BIT_XOR_EXPR
,
471 type
, op1
, build_int_cst (type
, 1));
477 result
= fold_binary_loc (loc
, subcode
,
478 TREE_TYPE (gimple_assign_lhs (stmt
)),
479 gimple_assign_rhs1 (stmt
),
480 gimple_assign_rhs2 (stmt
));
484 STRIP_USELESS_TYPE_CONVERSION (result
);
485 if (valid_gimple_rhs_p (result
))
490 case GIMPLE_TERNARY_RHS
:
491 /* Try to fold a conditional expression. */
492 if (gimple_assign_rhs_code (stmt
) == COND_EXPR
)
494 tree op0
= gimple_assign_rhs1 (stmt
);
497 location_t cond_loc
= gimple_location (stmt
);
499 if (COMPARISON_CLASS_P (op0
))
501 fold_defer_overflow_warnings ();
502 tem
= fold_binary_loc (cond_loc
,
503 TREE_CODE (op0
), TREE_TYPE (op0
),
504 TREE_OPERAND (op0
, 0),
505 TREE_OPERAND (op0
, 1));
506 /* This is actually a conditional expression, not a GIMPLE
507 conditional statement, however, the valid_gimple_rhs_p
508 test still applies. */
509 set
= (tem
&& is_gimple_condexpr (tem
)
510 && valid_gimple_rhs_p (tem
));
511 fold_undefer_overflow_warnings (set
, stmt
, 0);
513 else if (is_gimple_min_invariant (op0
))
522 result
= fold_build3_loc (cond_loc
, COND_EXPR
,
523 TREE_TYPE (gimple_assign_lhs (stmt
)), tem
,
524 gimple_assign_rhs2 (stmt
),
525 gimple_assign_rhs3 (stmt
));
529 result
= fold_ternary_loc (loc
, subcode
,
530 TREE_TYPE (gimple_assign_lhs (stmt
)),
531 gimple_assign_rhs1 (stmt
),
532 gimple_assign_rhs2 (stmt
),
533 gimple_assign_rhs3 (stmt
));
537 STRIP_USELESS_TYPE_CONVERSION (result
);
538 if (valid_gimple_rhs_p (result
))
543 case GIMPLE_INVALID_RHS
:
550 /* Attempt to fold a conditional statement. Return true if any changes were
551 made. We only attempt to fold the condition expression, and do not perform
552 any transformation that would require alteration of the cfg. It is
553 assumed that the operands have been previously folded. */
/* NOTE(review): extraction is incomplete -- the embedded upstream line numbers
   jump (e.g. 553 -> 556, 562 -> 566), so braces and return statements are
   missing; verify against upstream gimple-fold.c. */
556 fold_gimple_cond (gimple stmt
)
558 tree result
= fold_binary_loc (gimple_location (stmt
),
559 gimple_cond_code (stmt
),
561 gimple_cond_lhs (stmt
),
562 gimple_cond_rhs (stmt
));
566 STRIP_USELESS_TYPE_CONVERSION (result
);
567 if (is_gimple_condexpr (result
) && valid_gimple_rhs_p (result
))
569 gimple_cond_set_condition_from_tree (stmt
, result
);
577 /* Convert EXPR into a GIMPLE value suitable for substitution on the
578 RHS of an assignment. Insert the necessary statements before
579 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
580 is replaced. If the call is expected to produce a result, then it
581 is replaced by an assignment of the new RHS to the result variable.
582 If the result is to be ignored, then the call is replaced by a
583 GIMPLE_NOP. A proper VDEF chain is retained by making the first
584 VUSE and the last VDEF of the whole sequence be the same as the replaced
585 statement and using new SSA names for stores in between. */
/* NOTE(review): extraction is incomplete -- the embedded upstream line numbers
   jump (e.g. 594 -> 598, 647 -> 651), so declarations (LHS, VDEF, LASTSTORE,
   REACHING_VUSE), braces and early returns are missing; verify against
   upstream gimple-fold.c. */
588 gimplify_and_update_call_from_tree (gimple_stmt_iterator
*si_p
, tree expr
)
591 gimple stmt
, new_stmt
;
592 gimple_stmt_iterator i
;
593 gimple_seq stmts
= NULL
;
594 struct gimplify_ctx gctx
;
598 stmt
= gsi_stmt (*si_p
);
600 gcc_assert (is_gimple_call (stmt
));
602 push_gimplify_context (&gctx
);
603 gctx
.into_ssa
= gimple_in_ssa_p (cfun
);
605 lhs
= gimple_call_lhs (stmt
);
606 if (lhs
== NULL_TREE
)
608 gimplify_and_add (expr
, &stmts
);
609 /* We can end up with folding a memcpy of an empty class assignment
610 which gets optimized away by C++ gimplification. */
611 if (gimple_seq_empty_p (stmts
))
613 pop_gimplify_context (NULL
);
614 if (gimple_in_ssa_p (cfun
))
616 unlink_stmt_vdef (stmt
);
619 gsi_replace (si_p
, gimple_build_nop (), true);
625 tree tmp
= get_initialized_tmp_var (expr
, &stmts
, NULL
);
626 new_stmt
= gimple_build_assign (lhs
, tmp
);
627 i
= gsi_last (stmts
);
628 gsi_insert_after_without_update (&i
, new_stmt
,
629 GSI_CONTINUE_LINKING
);
632 pop_gimplify_context (NULL
);
634 if (gimple_has_location (stmt
))
635 annotate_all_with_location (stmts
, gimple_location (stmt
));
637 /* First iterate over the replacement statements backward, assigning
638 virtual operands to their defining statements. */
640 for (i
= gsi_last (stmts
); !gsi_end_p (i
); gsi_prev (&i
))
642 new_stmt
= gsi_stmt (i
);
643 if ((gimple_assign_single_p (new_stmt
)
644 && !is_gimple_reg (gimple_assign_lhs (new_stmt
)))
645 || (is_gimple_call (new_stmt
)
646 && (gimple_call_flags (new_stmt
)
647 & (ECF_NOVOPS
| ECF_PURE
| ECF_CONST
| ECF_NORETURN
)) == 0))
651 vdef
= gimple_vdef (stmt
);
653 vdef
= make_ssa_name (gimple_vop (cfun
), new_stmt
);
654 gimple_set_vdef (new_stmt
, vdef
);
655 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
656 SSA_NAME_DEF_STMT (vdef
) = new_stmt
;
657 laststore
= new_stmt
;
661 /* Second iterate over the statements forward, assigning virtual
662 operands to their uses. */
663 reaching_vuse
= gimple_vuse (stmt
);
664 for (i
= gsi_start (stmts
); !gsi_end_p (i
); gsi_next (&i
))
666 new_stmt
= gsi_stmt (i
);
667 /* If the new statement possibly has a VUSE, update it with exact SSA
668 name we know will reach this one. */
669 if (gimple_has_mem_ops (new_stmt
))
670 gimple_set_vuse (new_stmt
, reaching_vuse
);
671 gimple_set_modified (new_stmt
, true);
672 if (gimple_vdef (new_stmt
))
673 reaching_vuse
= gimple_vdef (new_stmt
);
676 /* If the new sequence does not do a store release the virtual
677 definition of the original statement. */
679 && reaching_vuse
== gimple_vuse (stmt
))
681 tree vdef
= gimple_vdef (stmt
);
683 && TREE_CODE (vdef
) == SSA_NAME
)
685 unlink_stmt_vdef (stmt
);
686 release_ssa_name (vdef
);
690 /* Finally replace the original statement with the sequence. */
691 gsi_replace_with_seq (si_p
, stmts
, false);
694 /* Return the string length, maximum string length or maximum value of
696 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
697 is not NULL and, for TYPE == 0, its value is not equal to the length
698 we determine or if we are unable to determine the length or value,
699 return false. VISITED is a bitmap of visited variables.
700 TYPE is 0 if string length should be returned, 1 for maximum string
701 length and 2 for maximum value ARG can have. */
/* NOTE(review): extraction is incomplete -- the embedded upstream line numbers
   jump (e.g. 694 -> 696, 720 -> 726), so the declaration of VAR/DEF_STMT,
   braces and return statements are missing; verify against upstream
   gimple-fold.c. */
704 get_maxval_strlen (tree arg
, tree
*length
, bitmap visited
, int type
)
709 if (TREE_CODE (arg
) != SSA_NAME
)
711 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
712 if (TREE_CODE (arg
) == ADDR_EXPR
713 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ARRAY_REF
714 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg
, 0), 1)))
716 tree aop0
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
717 if (TREE_CODE (aop0
) == INDIRECT_REF
718 && TREE_CODE (TREE_OPERAND (aop0
, 0)) == SSA_NAME
)
719 return get_maxval_strlen (TREE_OPERAND (aop0
, 0),
720 length
, visited
, type
);
726 if (TREE_CODE (val
) != INTEGER_CST
727 || tree_int_cst_sgn (val
) < 0)
731 val
= c_strlen (arg
, 1);
739 if (TREE_CODE (*length
) != INTEGER_CST
740 || TREE_CODE (val
) != INTEGER_CST
)
743 if (tree_int_cst_lt (*length
, val
))
747 else if (simple_cst_equal (val
, *length
) != 1)
755 /* If ARG is registered for SSA update we cannot look at its defining
757 if (name_registered_for_update_p (arg
))
760 /* If we were already here, break the infinite cycle. */
761 if (!bitmap_set_bit (visited
, SSA_NAME_VERSION (arg
)))
765 def_stmt
= SSA_NAME_DEF_STMT (var
);
767 switch (gimple_code (def_stmt
))
770 /* The RHS of the statement defining VAR must either have a
771 constant length or come from another SSA_NAME with a constant
773 if (gimple_assign_single_p (def_stmt
)
774 || gimple_assign_unary_nop_p (def_stmt
))
776 tree rhs
= gimple_assign_rhs1 (def_stmt
);
777 return get_maxval_strlen (rhs
, length
, visited
, type
);
779 else if (gimple_assign_rhs_code (def_stmt
) == COND_EXPR
)
781 tree op2
= gimple_assign_rhs2 (def_stmt
);
782 tree op3
= gimple_assign_rhs3 (def_stmt
);
783 return get_maxval_strlen (op2
, length
, visited
, type
)
784 && get_maxval_strlen (op3
, length
, visited
, type
);
790 /* All the arguments of the PHI node must have the same constant
794 for (i
= 0; i
< gimple_phi_num_args (def_stmt
); i
++)
796 tree arg
= gimple_phi_arg (def_stmt
, i
)->def
;
798 /* If this PHI has itself as an argument, we cannot
799 determine the string length of this argument. However,
800 if we can find a constant string length for the other
801 PHI args then we can still be sure that this is a
802 constant string length. So be optimistic and just
803 continue with the next argument. */
804 if (arg
== gimple_phi_result (def_stmt
))
807 if (!get_maxval_strlen (arg
, length
, visited
, type
))
819 /* Fold builtin call in statement STMT. Returns a simplified tree.
820 We may return a non-constant expression, including another call
821 to a different function and with different arguments, e.g.,
822 substituting memcpy for strcpy when the string length is known.
823 Note that some builtins expand into inline code that may not
824 be valid in GIMPLE. Callers must take care. */
/* NOTE(review): extraction is incomplete -- the embedded upstream line numbers
   jump (e.g. 827 -> 835, 843 -> 851), so local declarations (CALLEE, RESULT,
   VAL[], NARGS, ARG_IDX, TYPE, NEW_VAL), braces and break statements are
   missing; verify against upstream gimple-fold.c. */
827 gimple_fold_builtin (gimple stmt
)
835 location_t loc
= gimple_location (stmt
);
837 gcc_assert (is_gimple_call (stmt
));
839 ignore
= (gimple_call_lhs (stmt
) == NULL
);
841 /* First try the generic builtin folder. If that succeeds, return the
843 result
= fold_call_stmt (stmt
, ignore
);
851 /* Ignore MD builtins. */
852 callee
= gimple_call_fndecl (stmt
);
853 if (DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_MD
)
856 /* Give up for always_inline inline builtins until they are
858 if (avoid_folding_inline_builtin (callee
))
861 /* If the builtin could not be folded, and it has no argument list,
863 nargs
= gimple_call_num_args (stmt
);
867 /* Limit the work only for builtins we know how to simplify. */
868 switch (DECL_FUNCTION_CODE (callee
))
870 case BUILT_IN_STRLEN
:
872 case BUILT_IN_FPUTS_UNLOCKED
:
876 case BUILT_IN_STRCPY
:
877 case BUILT_IN_STRNCPY
:
881 case BUILT_IN_MEMCPY_CHK
:
882 case BUILT_IN_MEMPCPY_CHK
:
883 case BUILT_IN_MEMMOVE_CHK
:
884 case BUILT_IN_MEMSET_CHK
:
885 case BUILT_IN_STRNCPY_CHK
:
886 case BUILT_IN_STPNCPY_CHK
:
890 case BUILT_IN_STRCPY_CHK
:
891 case BUILT_IN_STPCPY_CHK
:
895 case BUILT_IN_SNPRINTF_CHK
:
896 case BUILT_IN_VSNPRINTF_CHK
:
904 if (arg_idx
>= nargs
)
907 /* Try to use the dataflow information gathered by the CCP process. */
908 visited
= BITMAP_ALLOC (NULL
);
909 bitmap_clear (visited
);
911 memset (val
, 0, sizeof (val
));
912 a
= gimple_call_arg (stmt
, arg_idx
);
913 if (!get_maxval_strlen (a
, &val
[arg_idx
], visited
, type
))
914 val
[arg_idx
] = NULL_TREE
;
916 BITMAP_FREE (visited
);
919 switch (DECL_FUNCTION_CODE (callee
))
921 case BUILT_IN_STRLEN
:
922 if (val
[0] && nargs
== 1)
925 fold_convert (TREE_TYPE (gimple_call_lhs (stmt
)), val
[0]);
927 /* If the result is not a valid gimple value, or not a cast
928 of a valid gimple value, then we cannot use the result. */
929 if (is_gimple_val (new_val
)
930 || (CONVERT_EXPR_P (new_val
)
931 && is_gimple_val (TREE_OPERAND (new_val
, 0))))
936 case BUILT_IN_STRCPY
:
937 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 2)
938 result
= fold_builtin_strcpy (loc
, callee
,
939 gimple_call_arg (stmt
, 0),
940 gimple_call_arg (stmt
, 1),
944 case BUILT_IN_STRNCPY
:
945 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 3)
946 result
= fold_builtin_strncpy (loc
, callee
,
947 gimple_call_arg (stmt
, 0),
948 gimple_call_arg (stmt
, 1),
949 gimple_call_arg (stmt
, 2),
955 result
= fold_builtin_fputs (loc
, gimple_call_arg (stmt
, 0),
956 gimple_call_arg (stmt
, 1),
957 ignore
, false, val
[0]);
960 case BUILT_IN_FPUTS_UNLOCKED
:
962 result
= fold_builtin_fputs (loc
, gimple_call_arg (stmt
, 0),
963 gimple_call_arg (stmt
, 1),
964 ignore
, true, val
[0]);
967 case BUILT_IN_MEMCPY_CHK
:
968 case BUILT_IN_MEMPCPY_CHK
:
969 case BUILT_IN_MEMMOVE_CHK
:
970 case BUILT_IN_MEMSET_CHK
:
971 if (val
[2] && is_gimple_val (val
[2]) && nargs
== 4)
972 result
= fold_builtin_memory_chk (loc
, callee
,
973 gimple_call_arg (stmt
, 0),
974 gimple_call_arg (stmt
, 1),
975 gimple_call_arg (stmt
, 2),
976 gimple_call_arg (stmt
, 3),
978 DECL_FUNCTION_CODE (callee
));
981 case BUILT_IN_STRCPY_CHK
:
982 case BUILT_IN_STPCPY_CHK
:
983 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 3)
984 result
= fold_builtin_stxcpy_chk (loc
, callee
,
985 gimple_call_arg (stmt
, 0),
986 gimple_call_arg (stmt
, 1),
987 gimple_call_arg (stmt
, 2),
989 DECL_FUNCTION_CODE (callee
));
992 case BUILT_IN_STRNCPY_CHK
:
993 case BUILT_IN_STPNCPY_CHK
:
994 if (val
[2] && is_gimple_val (val
[2]) && nargs
== 4)
995 result
= fold_builtin_stxncpy_chk (loc
, gimple_call_arg (stmt
, 0),
996 gimple_call_arg (stmt
, 1),
997 gimple_call_arg (stmt
, 2),
998 gimple_call_arg (stmt
, 3),
1000 DECL_FUNCTION_CODE (callee
));
1003 case BUILT_IN_SNPRINTF_CHK
:
1004 case BUILT_IN_VSNPRINTF_CHK
:
1005 if (val
[1] && is_gimple_val (val
[1]))
1006 result
= gimple_fold_builtin_snprintf_chk (stmt
, val
[1],
1007 DECL_FUNCTION_CODE (callee
));
1014 if (result
&& ignore
)
1015 result
= fold_ignored_result (result
);
1020 /* Return a binfo to be used for devirtualization of calls based on an object
1021 represented by a declaration (i.e. a global or automatically allocated one)
1022 or NULL if it cannot be found or is not safe. CST is expected to be an
1023 ADDR_EXPR of such object or the function will return NULL. Currently it is
1024 safe to use such binfo only if it has no base binfo (i.e. no ancestors)
1025 EXPECTED_TYPE is type of the class virtual belongs to. */
/* NOTE(review): extraction is incomplete -- the embedded upstream line numbers
   jump (e.g. 1041 -> 1045, 1048 -> 1052), so the enclosing loop, the FLD
   declaration, braces and return statements are missing; verify against
   upstream gimple-fold.c. */
1028 gimple_extract_devirt_binfo_from_cst (tree cst
, tree expected_type
)
1030 HOST_WIDE_INT offset
, size
, max_size
;
1031 tree base
, type
, binfo
;
1032 bool last_artificial
= false;
1034 if (!flag_devirtualize
1035 || TREE_CODE (cst
) != ADDR_EXPR
1036 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst
))) != RECORD_TYPE
)
1039 cst
= TREE_OPERAND (cst
, 0);
1040 base
= get_ref_base_and_extent (cst
, &offset
, &size
, &max_size
);
1041 type
= TREE_TYPE (base
);
1045 || TREE_CODE (type
) != RECORD_TYPE
)
1048 /* Find the sub-object the constant actually refers to and mark whether it is
1049 an artificial one (as opposed to a user-defined one). */
1052 HOST_WIDE_INT pos
, size
;
1055 if (types_same_for_odr (type
, expected_type
))
1060 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
1062 if (TREE_CODE (fld
) != FIELD_DECL
)
1065 pos
= int_bit_position (fld
);
1066 size
= tree_low_cst (DECL_SIZE (fld
), 1);
1067 if (pos
<= offset
&& (pos
+ size
) > offset
)
1070 if (!fld
|| TREE_CODE (TREE_TYPE (fld
)) != RECORD_TYPE
)
1073 last_artificial
= DECL_ARTIFICIAL (fld
);
1074 type
= TREE_TYPE (fld
);
1077 /* Artificial sub-objects are ancestors, we do not want to use them for
1078 devirtualization, at least not here. */
1079 if (last_artificial
)
1081 binfo
= TYPE_BINFO (type
);
1082 if (!binfo
|| BINFO_N_BASE_BINFOS (binfo
) > 0)
1088 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1089 The statement may be replaced by another statement, e.g., if the call
1090 simplifies to a constant value. Return true if any changes were made.
1091 It is assumed that the operands have been previously folded. */
/* NOTE(review): extraction is incomplete -- the embedded upstream line numbers
   jump (e.g. 1091 -> 1094, 1126 -> 1130), so declarations (I, CALLEE, TOKEN),
   braces and the final return are missing; verify against upstream
   gimple-fold.c. */
1094 gimple_fold_call (gimple_stmt_iterator
*gsi
, bool inplace
)
1096 gimple stmt
= gsi_stmt (*gsi
);
1098 bool changed
= false;
1101 /* Fold *& in call arguments. */
1102 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
1103 if (REFERENCE_CLASS_P (gimple_call_arg (stmt
, i
)))
1105 tree tmp
= maybe_fold_reference (gimple_call_arg (stmt
, i
), false);
1108 gimple_call_set_arg (stmt
, i
, tmp
);
1113 /* Check for virtual calls that became direct calls. */
1114 callee
= gimple_call_fn (stmt
);
1115 if (callee
&& TREE_CODE (callee
) == OBJ_TYPE_REF
)
1117 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee
)) != NULL_TREE
)
1119 gimple_call_set_fn (stmt
, OBJ_TYPE_REF_EXPR (callee
));
1122 else if (virtual_method_call_p (callee
))
1124 tree obj
= OBJ_TYPE_REF_OBJECT (callee
);
1125 tree binfo
= gimple_extract_devirt_binfo_from_cst
1126 (obj
, obj_type_ref_class (callee
));
1130 = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee
));
1131 tree fndecl
= gimple_get_virt_method_for_binfo (token
, binfo
);
1134 gimple_call_set_fndecl (stmt
, fndecl
);
1144 /* Check for builtins that CCP can handle using information not
1145 available in the generic fold routines. */
1146 callee
= gimple_call_fndecl (stmt
);
1147 if (callee
&& DECL_BUILT_IN (callee
))
1149 tree result
= gimple_fold_builtin (stmt
);
1152 if (!update_call_from_tree (gsi
, result
))
1153 gimplify_and_update_call_from_tree (gsi
, result
);
1156 else if (DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_MD
)
1157 changed
|= targetm
.gimple_fold_builtin (gsi
);
1163 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1164 distinguishes both cases. */
/* NOTE(review): extraction is incomplete -- the embedded upstream line numbers
   jump (e.g. 1164 -> 1167, 1203 -> 1210), so declarations (NEW_RHS, I,
   NOUTPUTS, VAL), case labels, braces and the final return are missing;
   verify against upstream gimple-fold.c. */
1167 fold_stmt_1 (gimple_stmt_iterator
*gsi
, bool inplace
)
1169 bool changed
= false;
1170 gimple stmt
= gsi_stmt (*gsi
);
1173 /* Fold the main computation performed by the statement. */
1174 switch (gimple_code (stmt
))
1178 unsigned old_num_ops
= gimple_num_ops (stmt
);
1179 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
1180 tree lhs
= gimple_assign_lhs (stmt
);
1182 /* First canonicalize operand order. This avoids building new
1183 trees if this is the only thing fold would later do. */
1184 if ((commutative_tree_code (subcode
)
1185 || commutative_ternary_tree_code (subcode
))
1186 && tree_swap_operands_p (gimple_assign_rhs1 (stmt
),
1187 gimple_assign_rhs2 (stmt
), false))
1189 tree tem
= gimple_assign_rhs1 (stmt
);
1190 gimple_assign_set_rhs1 (stmt
, gimple_assign_rhs2 (stmt
));
1191 gimple_assign_set_rhs2 (stmt
, tem
);
1194 new_rhs
= fold_gimple_assign (gsi
);
1196 && !useless_type_conversion_p (TREE_TYPE (lhs
),
1197 TREE_TYPE (new_rhs
)))
1198 new_rhs
= fold_convert (TREE_TYPE (lhs
), new_rhs
);
1201 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs
)) < old_num_ops
))
1203 gimple_assign_set_rhs_from_tree (gsi
, new_rhs
);
1210 changed
|= fold_gimple_cond (stmt
);
1214 changed
|= gimple_fold_call (gsi
, inplace
);
1218 /* Fold *& in asm operands. */
1221 const char **oconstraints
;
1222 const char *constraint
;
1223 bool allows_mem
, allows_reg
;
1225 noutputs
= gimple_asm_noutputs (stmt
);
1226 oconstraints
= XALLOCAVEC (const char *, noutputs
);
1228 for (i
= 0; i
< gimple_asm_noutputs (stmt
); ++i
)
1230 tree link
= gimple_asm_output_op (stmt
, i
);
1231 tree op
= TREE_VALUE (link
);
1233 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
1234 if (REFERENCE_CLASS_P (op
)
1235 && (op
= maybe_fold_reference (op
, true)) != NULL_TREE
)
1237 TREE_VALUE (link
) = op
;
1241 for (i
= 0; i
< gimple_asm_ninputs (stmt
); ++i
)
1243 tree link
= gimple_asm_input_op (stmt
, i
);
1244 tree op
= TREE_VALUE (link
);
1246 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
1247 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
1248 oconstraints
, &allows_mem
, &allows_reg
);
1249 if (REFERENCE_CLASS_P (op
)
1250 && (op
= maybe_fold_reference (op
, !allows_reg
&& allows_mem
))
1253 TREE_VALUE (link
) = op
;
1261 if (gimple_debug_bind_p (stmt
))
1263 tree val
= gimple_debug_bind_get_value (stmt
);
1265 && REFERENCE_CLASS_P (val
))
1267 tree tem
= maybe_fold_reference (val
, false);
1270 gimple_debug_bind_set_value (stmt
, tem
);
1275 && TREE_CODE (val
) == ADDR_EXPR
)
1277 tree ref
= TREE_OPERAND (val
, 0);
1278 tree tem
= maybe_fold_reference (ref
, false);
1281 tem
= build_fold_addr_expr_with_type (tem
, TREE_TYPE (val
));
1282 gimple_debug_bind_set_value (stmt
, tem
);
1292 stmt
= gsi_stmt (*gsi
);
1294 /* Fold *& on the lhs. */
1295 if (gimple_has_lhs (stmt
))
1297 tree lhs
= gimple_get_lhs (stmt
);
1298 if (lhs
&& REFERENCE_CLASS_P (lhs
))
1300 tree new_lhs
= maybe_fold_reference (lhs
, true);
1303 gimple_set_lhs (stmt
, new_lhs
);
1312 /* Fold the statement pointed to by GSI. In some cases, this function may
1313 replace the whole statement with a new one. Returns true iff folding
1315 The statement pointed to by GSI should be in valid gimple form but may
1316 be in unfolded state as resulting from for example constant propagation
1317 which can produce *&x = 0. */
1320 fold_stmt (gimple_stmt_iterator
*gsi
)
1322 return fold_stmt_1 (gsi
, false);
1325 /* Perform the minimal folding on statement *GSI. Only operations like
1326 *&x created by constant propagation are handled. The statement cannot
1327 be replaced with a new one. Return true if the statement was
1328 changed, false otherwise.
1329 The statement *GSI should be in valid gimple form but may
1330 be in unfolded state as resulting from for example constant propagation
1331 which can produce *&x = 0. */
1334 fold_stmt_inplace (gimple_stmt_iterator
*gsi
)
1336 gimple stmt
= gsi_stmt (*gsi
);
1337 bool changed
= fold_stmt_1 (gsi
, true);
1338 gcc_assert (gsi_stmt (*gsi
) == stmt
);
1342 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1343 if EXPR is null or we don't know how.
1344 If non-null, the result always has boolean type. */
1347 canonicalize_bool (tree expr
, bool invert
)
1353 if (integer_nonzerop (expr
))
1354 return boolean_false_node
;
1355 else if (integer_zerop (expr
))
1356 return boolean_true_node
;
1357 else if (TREE_CODE (expr
) == SSA_NAME
)
1358 return fold_build2 (EQ_EXPR
, boolean_type_node
, expr
,
1359 build_int_cst (TREE_TYPE (expr
), 0));
1360 else if (TREE_CODE_CLASS (TREE_CODE (expr
)) == tcc_comparison
)
1361 return fold_build2 (invert_tree_comparison (TREE_CODE (expr
), false),
1363 TREE_OPERAND (expr
, 0),
1364 TREE_OPERAND (expr
, 1));
1370 if (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
1372 if (integer_nonzerop (expr
))
1373 return boolean_true_node
;
1374 else if (integer_zerop (expr
))
1375 return boolean_false_node
;
1376 else if (TREE_CODE (expr
) == SSA_NAME
)
1377 return fold_build2 (NE_EXPR
, boolean_type_node
, expr
,
1378 build_int_cst (TREE_TYPE (expr
), 0));
1379 else if (TREE_CODE_CLASS (TREE_CODE (expr
)) == tcc_comparison
)
1380 return fold_build2 (TREE_CODE (expr
),
1382 TREE_OPERAND (expr
, 0),
1383 TREE_OPERAND (expr
, 1));
1389 /* Check to see if a boolean expression EXPR is logically equivalent to the
1390 comparison (OP1 CODE OP2). Check for various identities involving
1394 same_bool_comparison_p (const_tree expr
, enum tree_code code
,
1395 const_tree op1
, const_tree op2
)
1399 /* The obvious case. */
1400 if (TREE_CODE (expr
) == code
1401 && operand_equal_p (TREE_OPERAND (expr
, 0), op1
, 0)
1402 && operand_equal_p (TREE_OPERAND (expr
, 1), op2
, 0))
1405 /* Check for comparing (name, name != 0) and the case where expr
1406 is an SSA_NAME with a definition matching the comparison. */
1407 if (TREE_CODE (expr
) == SSA_NAME
1408 && TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
1410 if (operand_equal_p (expr
, op1
, 0))
1411 return ((code
== NE_EXPR
&& integer_zerop (op2
))
1412 || (code
== EQ_EXPR
&& integer_nonzerop (op2
)));
1413 s
= SSA_NAME_DEF_STMT (expr
);
1414 if (is_gimple_assign (s
)
1415 && gimple_assign_rhs_code (s
) == code
1416 && operand_equal_p (gimple_assign_rhs1 (s
), op1
, 0)
1417 && operand_equal_p (gimple_assign_rhs2 (s
), op2
, 0))
1421 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1422 of name is a comparison, recurse. */
1423 if (TREE_CODE (op1
) == SSA_NAME
1424 && TREE_CODE (TREE_TYPE (op1
)) == BOOLEAN_TYPE
)
1426 s
= SSA_NAME_DEF_STMT (op1
);
1427 if (is_gimple_assign (s
)
1428 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
)
1430 enum tree_code c
= gimple_assign_rhs_code (s
);
1431 if ((c
== NE_EXPR
&& integer_zerop (op2
))
1432 || (c
== EQ_EXPR
&& integer_nonzerop (op2
)))
1433 return same_bool_comparison_p (expr
, c
,
1434 gimple_assign_rhs1 (s
),
1435 gimple_assign_rhs2 (s
));
1436 if ((c
== EQ_EXPR
&& integer_zerop (op2
))
1437 || (c
== NE_EXPR
&& integer_nonzerop (op2
)))
1438 return same_bool_comparison_p (expr
,
1439 invert_tree_comparison (c
, false),
1440 gimple_assign_rhs1 (s
),
1441 gimple_assign_rhs2 (s
));
1447 /* Check to see if two boolean expressions OP1 and OP2 are logically
1451 same_bool_result_p (const_tree op1
, const_tree op2
)
1453 /* Simple cases first. */
1454 if (operand_equal_p (op1
, op2
, 0))
1457 /* Check the cases where at least one of the operands is a comparison.
1458 These are a bit smarter than operand_equal_p in that they apply some
1459 identifies on SSA_NAMEs. */
1460 if (TREE_CODE_CLASS (TREE_CODE (op2
)) == tcc_comparison
1461 && same_bool_comparison_p (op1
, TREE_CODE (op2
),
1462 TREE_OPERAND (op2
, 0),
1463 TREE_OPERAND (op2
, 1)))
1465 if (TREE_CODE_CLASS (TREE_CODE (op1
)) == tcc_comparison
1466 && same_bool_comparison_p (op2
, TREE_CODE (op1
),
1467 TREE_OPERAND (op1
, 0),
1468 TREE_OPERAND (op1
, 1)))
1475 /* Forward declarations for some mutually recursive functions. */
1478 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
1479 enum tree_code code2
, tree op2a
, tree op2b
);
1481 and_var_with_comparison (tree var
, bool invert
,
1482 enum tree_code code2
, tree op2a
, tree op2b
);
1484 and_var_with_comparison_1 (gimple stmt
,
1485 enum tree_code code2
, tree op2a
, tree op2b
);
1487 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
1488 enum tree_code code2
, tree op2a
, tree op2b
);
1490 or_var_with_comparison (tree var
, bool invert
,
1491 enum tree_code code2
, tree op2a
, tree op2b
);
1493 or_var_with_comparison_1 (gimple stmt
,
1494 enum tree_code code2
, tree op2a
, tree op2b
);
1496 /* Helper function for and_comparisons_1: try to simplify the AND of the
1497 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1498 If INVERT is true, invert the value of the VAR before doing the AND.
1499 Return NULL_EXPR if we can't simplify this to a single expression. */
1502 and_var_with_comparison (tree var
, bool invert
,
1503 enum tree_code code2
, tree op2a
, tree op2b
)
1506 gimple stmt
= SSA_NAME_DEF_STMT (var
);
1508 /* We can only deal with variables whose definitions are assignments. */
1509 if (!is_gimple_assign (stmt
))
1512 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1513 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1514 Then we only have to consider the simpler non-inverted cases. */
1516 t
= or_var_with_comparison_1 (stmt
,
1517 invert_tree_comparison (code2
, false),
1520 t
= and_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
1521 return canonicalize_bool (t
, invert
);
1524 /* Try to simplify the AND of the ssa variable defined by the assignment
1525 STMT with the comparison specified by (OP2A CODE2 OP2B).
1526 Return NULL_EXPR if we can't simplify this to a single expression. */
1529 and_var_with_comparison_1 (gimple stmt
,
1530 enum tree_code code2
, tree op2a
, tree op2b
)
1532 tree var
= gimple_assign_lhs (stmt
);
1533 tree true_test_var
= NULL_TREE
;
1534 tree false_test_var
= NULL_TREE
;
1535 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
1537 /* Check for identities like (var AND (var == 0)) => false. */
1538 if (TREE_CODE (op2a
) == SSA_NAME
1539 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
1541 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
1542 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
1544 true_test_var
= op2a
;
1545 if (var
== true_test_var
)
1548 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
1549 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
1551 false_test_var
= op2a
;
1552 if (var
== false_test_var
)
1553 return boolean_false_node
;
1557 /* If the definition is a comparison, recurse on it. */
1558 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
1560 tree t
= and_comparisons_1 (innercode
,
1561 gimple_assign_rhs1 (stmt
),
1562 gimple_assign_rhs2 (stmt
),
1570 /* If the definition is an AND or OR expression, we may be able to
1571 simplify by reassociating. */
1572 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
1573 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
1575 tree inner1
= gimple_assign_rhs1 (stmt
);
1576 tree inner2
= gimple_assign_rhs2 (stmt
);
1579 tree partial
= NULL_TREE
;
1580 bool is_and
= (innercode
== BIT_AND_EXPR
);
1582 /* Check for boolean identities that don't require recursive examination
1584 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1585 inner1 AND (inner1 OR inner2) => inner1
1586 !inner1 AND (inner1 AND inner2) => false
1587 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1588 Likewise for similar cases involving inner2. */
1589 if (inner1
== true_test_var
)
1590 return (is_and
? var
: inner1
);
1591 else if (inner2
== true_test_var
)
1592 return (is_and
? var
: inner2
);
1593 else if (inner1
== false_test_var
)
1595 ? boolean_false_node
1596 : and_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
1597 else if (inner2
== false_test_var
)
1599 ? boolean_false_node
1600 : and_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
1602 /* Next, redistribute/reassociate the AND across the inner tests.
1603 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1604 if (TREE_CODE (inner1
) == SSA_NAME
1605 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
1606 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
1607 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
1608 gimple_assign_rhs1 (s
),
1609 gimple_assign_rhs2 (s
),
1610 code2
, op2a
, op2b
)))
1612 /* Handle the AND case, where we are reassociating:
1613 (inner1 AND inner2) AND (op2a code2 op2b)
1615 If the partial result t is a constant, we win. Otherwise
1616 continue on to try reassociating with the other inner test. */
1619 if (integer_onep (t
))
1621 else if (integer_zerop (t
))
1622 return boolean_false_node
;
1625 /* Handle the OR case, where we are redistributing:
1626 (inner1 OR inner2) AND (op2a code2 op2b)
1627 => (t OR (inner2 AND (op2a code2 op2b))) */
1628 else if (integer_onep (t
))
1629 return boolean_true_node
;
1631 /* Save partial result for later. */
1635 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1636 if (TREE_CODE (inner2
) == SSA_NAME
1637 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
1638 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
1639 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
1640 gimple_assign_rhs1 (s
),
1641 gimple_assign_rhs2 (s
),
1642 code2
, op2a
, op2b
)))
1644 /* Handle the AND case, where we are reassociating:
1645 (inner1 AND inner2) AND (op2a code2 op2b)
1646 => (inner1 AND t) */
1649 if (integer_onep (t
))
1651 else if (integer_zerop (t
))
1652 return boolean_false_node
;
1653 /* If both are the same, we can apply the identity
1655 else if (partial
&& same_bool_result_p (t
, partial
))
1659 /* Handle the OR case. where we are redistributing:
1660 (inner1 OR inner2) AND (op2a code2 op2b)
1661 => (t OR (inner1 AND (op2a code2 op2b)))
1662 => (t OR partial) */
1665 if (integer_onep (t
))
1666 return boolean_true_node
;
1669 /* We already got a simplification for the other
1670 operand to the redistributed OR expression. The
1671 interesting case is when at least one is false.
1672 Or, if both are the same, we can apply the identity
1674 if (integer_zerop (partial
))
1676 else if (integer_zerop (t
))
1678 else if (same_bool_result_p (t
, partial
))
1687 /* Try to simplify the AND of two comparisons defined by
1688 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1689 If this can be done without constructing an intermediate value,
1690 return the resulting tree; otherwise NULL_TREE is returned.
1691 This function is deliberately asymmetric as it recurses on SSA_DEFs
1692 in the first comparison but not the second. */
1695 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
1696 enum tree_code code2
, tree op2a
, tree op2b
)
1698 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
1700 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1701 if (operand_equal_p (op1a
, op2a
, 0)
1702 && operand_equal_p (op1b
, op2b
, 0))
1704 /* Result will be either NULL_TREE, or a combined comparison. */
1705 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
1706 TRUTH_ANDIF_EXPR
, code1
, code2
,
1707 truth_type
, op1a
, op1b
);
1712 /* Likewise the swapped case of the above. */
1713 if (operand_equal_p (op1a
, op2b
, 0)
1714 && operand_equal_p (op1b
, op2a
, 0))
1716 /* Result will be either NULL_TREE, or a combined comparison. */
1717 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
1718 TRUTH_ANDIF_EXPR
, code1
,
1719 swap_tree_comparison (code2
),
1720 truth_type
, op1a
, op1b
);
1725 /* If both comparisons are of the same value against constants, we might
1726 be able to merge them. */
1727 if (operand_equal_p (op1a
, op2a
, 0)
1728 && TREE_CODE (op1b
) == INTEGER_CST
1729 && TREE_CODE (op2b
) == INTEGER_CST
)
1731 int cmp
= tree_int_cst_compare (op1b
, op2b
);
1733 /* If we have (op1a == op1b), we should either be able to
1734 return that or FALSE, depending on whether the constant op1b
1735 also satisfies the other comparison against op2b. */
1736 if (code1
== EQ_EXPR
)
1742 case EQ_EXPR
: val
= (cmp
== 0); break;
1743 case NE_EXPR
: val
= (cmp
!= 0); break;
1744 case LT_EXPR
: val
= (cmp
< 0); break;
1745 case GT_EXPR
: val
= (cmp
> 0); break;
1746 case LE_EXPR
: val
= (cmp
<= 0); break;
1747 case GE_EXPR
: val
= (cmp
>= 0); break;
1748 default: done
= false;
1753 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1755 return boolean_false_node
;
1758 /* Likewise if the second comparison is an == comparison. */
1759 else if (code2
== EQ_EXPR
)
1765 case EQ_EXPR
: val
= (cmp
== 0); break;
1766 case NE_EXPR
: val
= (cmp
!= 0); break;
1767 case LT_EXPR
: val
= (cmp
> 0); break;
1768 case GT_EXPR
: val
= (cmp
< 0); break;
1769 case LE_EXPR
: val
= (cmp
>= 0); break;
1770 case GE_EXPR
: val
= (cmp
<= 0); break;
1771 default: done
= false;
1776 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1778 return boolean_false_node
;
1782 /* Same business with inequality tests. */
1783 else if (code1
== NE_EXPR
)
1788 case EQ_EXPR
: val
= (cmp
!= 0); break;
1789 case NE_EXPR
: val
= (cmp
== 0); break;
1790 case LT_EXPR
: val
= (cmp
>= 0); break;
1791 case GT_EXPR
: val
= (cmp
<= 0); break;
1792 case LE_EXPR
: val
= (cmp
> 0); break;
1793 case GE_EXPR
: val
= (cmp
< 0); break;
1798 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1800 else if (code2
== NE_EXPR
)
1805 case EQ_EXPR
: val
= (cmp
== 0); break;
1806 case NE_EXPR
: val
= (cmp
!= 0); break;
1807 case LT_EXPR
: val
= (cmp
<= 0); break;
1808 case GT_EXPR
: val
= (cmp
>= 0); break;
1809 case LE_EXPR
: val
= (cmp
< 0); break;
1810 case GE_EXPR
: val
= (cmp
> 0); break;
1815 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1818 /* Chose the more restrictive of two < or <= comparisons. */
1819 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
1820 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
1822 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
1823 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1825 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1828 /* Likewise chose the more restrictive of two > or >= comparisons. */
1829 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
1830 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
1832 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
1833 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1835 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1838 /* Check for singleton ranges. */
1840 && ((code1
== LE_EXPR
&& code2
== GE_EXPR
)
1841 || (code1
== GE_EXPR
&& code2
== LE_EXPR
)))
1842 return fold_build2 (EQ_EXPR
, boolean_type_node
, op1a
, op2b
);
1844 /* Check for disjoint ranges. */
1846 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
1847 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
1848 return boolean_false_node
;
1850 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
1851 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
1852 return boolean_false_node
;
1855 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1856 NAME's definition is a truth value. See if there are any simplifications
1857 that can be done against the NAME's definition. */
1858 if (TREE_CODE (op1a
) == SSA_NAME
1859 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
1860 && (integer_zerop (op1b
) || integer_onep (op1b
)))
1862 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
1863 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
1864 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
1865 switch (gimple_code (stmt
))
1868 /* Try to simplify by copy-propagating the definition. */
1869 return and_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
1872 /* If every argument to the PHI produces the same result when
1873 ANDed with the second comparison, we win.
1874 Do not do this unless the type is bool since we need a bool
1875 result here anyway. */
1876 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
1878 tree result
= NULL_TREE
;
1880 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
1882 tree arg
= gimple_phi_arg_def (stmt
, i
);
1884 /* If this PHI has itself as an argument, ignore it.
1885 If all the other args produce the same result,
1887 if (arg
== gimple_phi_result (stmt
))
1889 else if (TREE_CODE (arg
) == INTEGER_CST
)
1891 if (invert
? integer_nonzerop (arg
) : integer_zerop (arg
))
1894 result
= boolean_false_node
;
1895 else if (!integer_zerop (result
))
1899 result
= fold_build2 (code2
, boolean_type_node
,
1901 else if (!same_bool_comparison_p (result
,
1905 else if (TREE_CODE (arg
) == SSA_NAME
1906 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
1909 gimple def_stmt
= SSA_NAME_DEF_STMT (arg
);
1910 /* In simple cases we can look through PHI nodes,
1911 but we have to be careful with loops.
1913 if (! dom_info_available_p (CDI_DOMINATORS
)
1914 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
1915 || dominated_by_p (CDI_DOMINATORS
,
1916 gimple_bb (def_stmt
),
1919 temp
= and_var_with_comparison (arg
, invert
, code2
,
1925 else if (!same_bool_result_p (result
, temp
))
1941 /* Try to simplify the AND of two comparisons, specified by
1942 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1943 If this can be simplified to a single expression (without requiring
1944 introducing more SSA variables to hold intermediate values),
1945 return the resulting tree. Otherwise return NULL_TREE.
1946 If the result expression is non-null, it has boolean type. */
1949 maybe_fold_and_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
1950 enum tree_code code2
, tree op2a
, tree op2b
)
1952 tree t
= and_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
1956 return and_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
1959 /* Helper function for or_comparisons_1: try to simplify the OR of the
1960 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1961 If INVERT is true, invert the value of VAR before doing the OR.
1962 Return NULL_EXPR if we can't simplify this to a single expression. */
1965 or_var_with_comparison (tree var
, bool invert
,
1966 enum tree_code code2
, tree op2a
, tree op2b
)
1969 gimple stmt
= SSA_NAME_DEF_STMT (var
);
1971 /* We can only deal with variables whose definitions are assignments. */
1972 if (!is_gimple_assign (stmt
))
1975 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1976 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1977 Then we only have to consider the simpler non-inverted cases. */
1979 t
= and_var_with_comparison_1 (stmt
,
1980 invert_tree_comparison (code2
, false),
1983 t
= or_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
1984 return canonicalize_bool (t
, invert
);
1987 /* Try to simplify the OR of the ssa variable defined by the assignment
1988 STMT with the comparison specified by (OP2A CODE2 OP2B).
1989 Return NULL_EXPR if we can't simplify this to a single expression. */
1992 or_var_with_comparison_1 (gimple stmt
,
1993 enum tree_code code2
, tree op2a
, tree op2b
)
1995 tree var
= gimple_assign_lhs (stmt
);
1996 tree true_test_var
= NULL_TREE
;
1997 tree false_test_var
= NULL_TREE
;
1998 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
2000 /* Check for identities like (var OR (var != 0)) => true . */
2001 if (TREE_CODE (op2a
) == SSA_NAME
2002 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
2004 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
2005 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
2007 true_test_var
= op2a
;
2008 if (var
== true_test_var
)
2011 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
2012 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
2014 false_test_var
= op2a
;
2015 if (var
== false_test_var
)
2016 return boolean_true_node
;
2020 /* If the definition is a comparison, recurse on it. */
2021 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
2023 tree t
= or_comparisons_1 (innercode
,
2024 gimple_assign_rhs1 (stmt
),
2025 gimple_assign_rhs2 (stmt
),
2033 /* If the definition is an AND or OR expression, we may be able to
2034 simplify by reassociating. */
2035 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
2036 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
2038 tree inner1
= gimple_assign_rhs1 (stmt
);
2039 tree inner2
= gimple_assign_rhs2 (stmt
);
2042 tree partial
= NULL_TREE
;
2043 bool is_or
= (innercode
== BIT_IOR_EXPR
);
2045 /* Check for boolean identities that don't require recursive examination
2047 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2048 inner1 OR (inner1 AND inner2) => inner1
2049 !inner1 OR (inner1 OR inner2) => true
2050 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2052 if (inner1
== true_test_var
)
2053 return (is_or
? var
: inner1
);
2054 else if (inner2
== true_test_var
)
2055 return (is_or
? var
: inner2
);
2056 else if (inner1
== false_test_var
)
2059 : or_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
2060 else if (inner2
== false_test_var
)
2063 : or_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
2065 /* Next, redistribute/reassociate the OR across the inner tests.
2066 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2067 if (TREE_CODE (inner1
) == SSA_NAME
2068 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
2069 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2070 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
2071 gimple_assign_rhs1 (s
),
2072 gimple_assign_rhs2 (s
),
2073 code2
, op2a
, op2b
)))
2075 /* Handle the OR case, where we are reassociating:
2076 (inner1 OR inner2) OR (op2a code2 op2b)
2078 If the partial result t is a constant, we win. Otherwise
2079 continue on to try reassociating with the other inner test. */
2082 if (integer_onep (t
))
2083 return boolean_true_node
;
2084 else if (integer_zerop (t
))
2088 /* Handle the AND case, where we are redistributing:
2089 (inner1 AND inner2) OR (op2a code2 op2b)
2090 => (t AND (inner2 OR (op2a code op2b))) */
2091 else if (integer_zerop (t
))
2092 return boolean_false_node
;
2094 /* Save partial result for later. */
2098 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2099 if (TREE_CODE (inner2
) == SSA_NAME
2100 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
2101 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2102 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
2103 gimple_assign_rhs1 (s
),
2104 gimple_assign_rhs2 (s
),
2105 code2
, op2a
, op2b
)))
2107 /* Handle the OR case, where we are reassociating:
2108 (inner1 OR inner2) OR (op2a code2 op2b)
2110 => (t OR partial) */
2113 if (integer_zerop (t
))
2115 else if (integer_onep (t
))
2116 return boolean_true_node
;
2117 /* If both are the same, we can apply the identity
2119 else if (partial
&& same_bool_result_p (t
, partial
))
2123 /* Handle the AND case, where we are redistributing:
2124 (inner1 AND inner2) OR (op2a code2 op2b)
2125 => (t AND (inner1 OR (op2a code2 op2b)))
2126 => (t AND partial) */
2129 if (integer_zerop (t
))
2130 return boolean_false_node
;
2133 /* We already got a simplification for the other
2134 operand to the redistributed AND expression. The
2135 interesting case is when at least one is true.
2136 Or, if both are the same, we can apply the identity
2138 if (integer_onep (partial
))
2140 else if (integer_onep (t
))
2142 else if (same_bool_result_p (t
, partial
))
2151 /* Try to simplify the OR of two comparisons defined by
2152 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2153 If this can be done without constructing an intermediate value,
2154 return the resulting tree; otherwise NULL_TREE is returned.
2155 This function is deliberately asymmetric as it recurses on SSA_DEFs
2156 in the first comparison but not the second. */
2159 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
2160 enum tree_code code2
, tree op2a
, tree op2b
)
2162 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
2164 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2165 if (operand_equal_p (op1a
, op2a
, 0)
2166 && operand_equal_p (op1b
, op2b
, 0))
2168 /* Result will be either NULL_TREE, or a combined comparison. */
2169 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2170 TRUTH_ORIF_EXPR
, code1
, code2
,
2171 truth_type
, op1a
, op1b
);
2176 /* Likewise the swapped case of the above. */
2177 if (operand_equal_p (op1a
, op2b
, 0)
2178 && operand_equal_p (op1b
, op2a
, 0))
2180 /* Result will be either NULL_TREE, or a combined comparison. */
2181 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2182 TRUTH_ORIF_EXPR
, code1
,
2183 swap_tree_comparison (code2
),
2184 truth_type
, op1a
, op1b
);
2189 /* If both comparisons are of the same value against constants, we might
2190 be able to merge them. */
2191 if (operand_equal_p (op1a
, op2a
, 0)
2192 && TREE_CODE (op1b
) == INTEGER_CST
2193 && TREE_CODE (op2b
) == INTEGER_CST
)
2195 int cmp
= tree_int_cst_compare (op1b
, op2b
);
2197 /* If we have (op1a != op1b), we should either be able to
2198 return that or TRUE, depending on whether the constant op1b
2199 also satisfies the other comparison against op2b. */
2200 if (code1
== NE_EXPR
)
2206 case EQ_EXPR
: val
= (cmp
== 0); break;
2207 case NE_EXPR
: val
= (cmp
!= 0); break;
2208 case LT_EXPR
: val
= (cmp
< 0); break;
2209 case GT_EXPR
: val
= (cmp
> 0); break;
2210 case LE_EXPR
: val
= (cmp
<= 0); break;
2211 case GE_EXPR
: val
= (cmp
>= 0); break;
2212 default: done
= false;
2217 return boolean_true_node
;
2219 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2222 /* Likewise if the second comparison is a != comparison. */
2223 else if (code2
== NE_EXPR
)
2229 case EQ_EXPR
: val
= (cmp
== 0); break;
2230 case NE_EXPR
: val
= (cmp
!= 0); break;
2231 case LT_EXPR
: val
= (cmp
> 0); break;
2232 case GT_EXPR
: val
= (cmp
< 0); break;
2233 case LE_EXPR
: val
= (cmp
>= 0); break;
2234 case GE_EXPR
: val
= (cmp
<= 0); break;
2235 default: done
= false;
2240 return boolean_true_node
;
2242 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2246 /* See if an equality test is redundant with the other comparison. */
2247 else if (code1
== EQ_EXPR
)
2252 case EQ_EXPR
: val
= (cmp
== 0); break;
2253 case NE_EXPR
: val
= (cmp
!= 0); break;
2254 case LT_EXPR
: val
= (cmp
< 0); break;
2255 case GT_EXPR
: val
= (cmp
> 0); break;
2256 case LE_EXPR
: val
= (cmp
<= 0); break;
2257 case GE_EXPR
: val
= (cmp
>= 0); break;
2262 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2264 else if (code2
== EQ_EXPR
)
2269 case EQ_EXPR
: val
= (cmp
== 0); break;
2270 case NE_EXPR
: val
= (cmp
!= 0); break;
2271 case LT_EXPR
: val
= (cmp
> 0); break;
2272 case GT_EXPR
: val
= (cmp
< 0); break;
2273 case LE_EXPR
: val
= (cmp
>= 0); break;
2274 case GE_EXPR
: val
= (cmp
<= 0); break;
2279 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2282 /* Chose the less restrictive of two < or <= comparisons. */
2283 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
2284 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
2286 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
2287 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2289 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2292 /* Likewise chose the less restrictive of two > or >= comparisons. */
2293 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
2294 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
2296 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
2297 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2299 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2302 /* Check for singleton ranges. */
2304 && ((code1
== LT_EXPR
&& code2
== GT_EXPR
)
2305 || (code1
== GT_EXPR
&& code2
== LT_EXPR
)))
2306 return fold_build2 (NE_EXPR
, boolean_type_node
, op1a
, op2b
);
2308 /* Check for less/greater pairs that don't restrict the range at all. */
2310 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
2311 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
2312 return boolean_true_node
;
2314 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
2315 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
2316 return boolean_true_node
;
2319 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2320 NAME's definition is a truth value. See if there are any simplifications
2321 that can be done against the NAME's definition. */
2322 if (TREE_CODE (op1a
) == SSA_NAME
2323 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
2324 && (integer_zerop (op1b
) || integer_onep (op1b
)))
2326 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
2327 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
2328 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
2329 switch (gimple_code (stmt
))
2332 /* Try to simplify by copy-propagating the definition. */
2333 return or_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
2336 /* If every argument to the PHI produces the same result when
2337 ORed with the second comparison, we win.
2338 Do not do this unless the type is bool since we need a bool
2339 result here anyway. */
2340 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
2342 tree result
= NULL_TREE
;
2344 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
2346 tree arg
= gimple_phi_arg_def (stmt
, i
);
2348 /* If this PHI has itself as an argument, ignore it.
2349 If all the other args produce the same result,
2351 if (arg
== gimple_phi_result (stmt
))
2353 else if (TREE_CODE (arg
) == INTEGER_CST
)
2355 if (invert
? integer_zerop (arg
) : integer_nonzerop (arg
))
2358 result
= boolean_true_node
;
2359 else if (!integer_onep (result
))
2363 result
= fold_build2 (code2
, boolean_type_node
,
2365 else if (!same_bool_comparison_p (result
,
2369 else if (TREE_CODE (arg
) == SSA_NAME
2370 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
2373 gimple def_stmt
= SSA_NAME_DEF_STMT (arg
);
2374 /* In simple cases we can look through PHI nodes,
2375 but we have to be careful with loops.
2377 if (! dom_info_available_p (CDI_DOMINATORS
)
2378 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
2379 || dominated_by_p (CDI_DOMINATORS
,
2380 gimple_bb (def_stmt
),
2383 temp
= or_var_with_comparison (arg
, invert
, code2
,
2389 else if (!same_bool_result_p (result
, temp
))
2405 /* Try to simplify the OR of two comparisons, specified by
2406 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2407 If this can be simplified to a single expression (without requiring
2408 introducing more SSA variables to hold intermediate values),
2409 return the resulting tree. Otherwise return NULL_TREE.
2410 If the result expression is non-null, it has boolean type. */
2413 maybe_fold_or_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
2414 enum tree_code code2
, tree op2a
, tree op2b
)
2416 tree t
= or_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
2420 return or_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
2424 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2426 Either NULL_TREE, a simplified but non-constant or a constant
2429 ??? This should go into a gimple-fold-inline.h file to be eventually
2430 privatized with the single valueize function used in the various TUs
2431 to avoid the indirect function call overhead. */
2434 gimple_fold_stmt_to_constant_1 (gimple stmt
, tree (*valueize
) (tree
))
2436 location_t loc
= gimple_location (stmt
);
2437 switch (gimple_code (stmt
))
2441 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
2443 switch (get_gimple_rhs_class (subcode
))
2445 case GIMPLE_SINGLE_RHS
:
2447 tree rhs
= gimple_assign_rhs1 (stmt
);
2448 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
2450 if (TREE_CODE (rhs
) == SSA_NAME
)
2452 /* If the RHS is an SSA_NAME, return its known constant value,
2454 return (*valueize
) (rhs
);
2456 /* Handle propagating invariant addresses into address
2458 else if (TREE_CODE (rhs
) == ADDR_EXPR
2459 && !is_gimple_min_invariant (rhs
))
2461 HOST_WIDE_INT offset
= 0;
2463 base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs
, 0),
2467 && (CONSTANT_CLASS_P (base
)
2468 || decl_address_invariant_p (base
)))
2469 return build_invariant_address (TREE_TYPE (rhs
),
2472 else if (TREE_CODE (rhs
) == CONSTRUCTOR
2473 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
2474 && (CONSTRUCTOR_NELTS (rhs
)
2475 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
2480 vec
= XALLOCAVEC (tree
,
2481 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
)));
2482 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
2484 val
= (*valueize
) (val
);
2485 if (TREE_CODE (val
) == INTEGER_CST
2486 || TREE_CODE (val
) == REAL_CST
2487 || TREE_CODE (val
) == FIXED_CST
)
2493 return build_vector (TREE_TYPE (rhs
), vec
);
2496 if (kind
== tcc_reference
)
2498 if ((TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
2499 || TREE_CODE (rhs
) == REALPART_EXPR
2500 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
2501 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
2503 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
2504 return fold_unary_loc (EXPR_LOCATION (rhs
),
2506 TREE_TYPE (rhs
), val
);
2508 else if (TREE_CODE (rhs
) == BIT_FIELD_REF
2509 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
2511 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
2512 return fold_ternary_loc (EXPR_LOCATION (rhs
),
2514 TREE_TYPE (rhs
), val
,
2515 TREE_OPERAND (rhs
, 1),
2516 TREE_OPERAND (rhs
, 2));
2518 else if (TREE_CODE (rhs
) == MEM_REF
2519 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
2521 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
2522 if (TREE_CODE (val
) == ADDR_EXPR
2523 && is_gimple_min_invariant (val
))
2525 tree tem
= fold_build2 (MEM_REF
, TREE_TYPE (rhs
),
2527 TREE_OPERAND (rhs
, 1));
2532 return fold_const_aggregate_ref_1 (rhs
, valueize
);
2534 else if (kind
== tcc_declaration
)
2535 return get_symbol_constant_value (rhs
);
2539 case GIMPLE_UNARY_RHS
:
2541 /* Handle unary operators that can appear in GIMPLE form.
2542 Note that we know the single operand must be a constant,
2543 so this should almost always return a simplified RHS. */
2544 tree lhs
= gimple_assign_lhs (stmt
);
2545 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
2547 /* Conversions are useless for CCP purposes if they are
2548 value-preserving. Thus the restrictions that
2549 useless_type_conversion_p places for restrict qualification
2550 of pointer types should not apply here.
2551 Substitution later will only substitute to allowed places. */
2552 if (CONVERT_EXPR_CODE_P (subcode
)
2553 && POINTER_TYPE_P (TREE_TYPE (lhs
))
2554 && POINTER_TYPE_P (TREE_TYPE (op0
))
2555 && TYPE_ADDR_SPACE (TREE_TYPE (lhs
))
2556 == TYPE_ADDR_SPACE (TREE_TYPE (op0
))
2557 && TYPE_MODE (TREE_TYPE (lhs
))
2558 == TYPE_MODE (TREE_TYPE (op0
)))
2562 fold_unary_ignore_overflow_loc (loc
, subcode
,
2563 gimple_expr_type (stmt
), op0
);
2566 case GIMPLE_BINARY_RHS
:
2568 /* Handle binary operators that can appear in GIMPLE form. */
2569 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
2570 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
2572 /* Translate &x + CST into an invariant form suitable for
2573 further propagation. */
2574 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
2575 && TREE_CODE (op0
) == ADDR_EXPR
2576 && TREE_CODE (op1
) == INTEGER_CST
)
2578 tree off
= fold_convert (ptr_type_node
, op1
);
2579 return build_fold_addr_expr_loc
2581 fold_build2 (MEM_REF
,
2582 TREE_TYPE (TREE_TYPE (op0
)),
2583 unshare_expr (op0
), off
));
2586 return fold_binary_loc (loc
, subcode
,
2587 gimple_expr_type (stmt
), op0
, op1
);
2590 case GIMPLE_TERNARY_RHS
:
2592 /* Handle ternary operators that can appear in GIMPLE form. */
2593 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
2594 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
2595 tree op2
= (*valueize
) (gimple_assign_rhs3 (stmt
));
2597 /* Fold embedded expressions in ternary codes. */
2598 if ((subcode
== COND_EXPR
2599 || subcode
== VEC_COND_EXPR
)
2600 && COMPARISON_CLASS_P (op0
))
2602 tree op00
= (*valueize
) (TREE_OPERAND (op0
, 0));
2603 tree op01
= (*valueize
) (TREE_OPERAND (op0
, 1));
2604 tree tem
= fold_binary_loc (loc
, TREE_CODE (op0
),
2605 TREE_TYPE (op0
), op00
, op01
);
2610 return fold_ternary_loc (loc
, subcode
,
2611 gimple_expr_type (stmt
), op0
, op1
, op2
);
2623 if (gimple_call_internal_p (stmt
))
2624 /* No folding yet for these functions. */
2627 fn
= (*valueize
) (gimple_call_fn (stmt
));
2628 if (TREE_CODE (fn
) == ADDR_EXPR
2629 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
2630 && DECL_BUILT_IN (TREE_OPERAND (fn
, 0)))
2632 tree
*args
= XALLOCAVEC (tree
, gimple_call_num_args (stmt
));
2635 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
2636 args
[i
] = (*valueize
) (gimple_call_arg (stmt
, i
));
2637 call
= build_call_array_loc (loc
,
2638 gimple_call_return_type (stmt
),
2639 fn
, gimple_call_num_args (stmt
), args
);
2640 retval
= fold_call_expr (EXPR_LOCATION (call
), call
, false);
2642 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2643 STRIP_NOPS (retval
);
2654 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2655 Returns NULL_TREE if folding to a constant is not possible, otherwise
2656 returns a constant according to is_gimple_min_invariant. */
2659 gimple_fold_stmt_to_constant (gimple stmt
, tree (*valueize
) (tree
))
2661 tree res
= gimple_fold_stmt_to_constant_1 (stmt
, valueize
);
2662 if (res
&& is_gimple_min_invariant (res
))
2668 /* The following set of functions are supposed to fold references using
2669 their constant initializers. */
2671 static tree
fold_ctor_reference (tree type
, tree ctor
,
2672 unsigned HOST_WIDE_INT offset
,
2673 unsigned HOST_WIDE_INT size
, tree
);
2675 /* See if we can find constructor defining value of BASE.
2676 When we know the consructor with constant offset (such as
2677 base is array[40] and we do know constructor of array), then
2678 BIT_OFFSET is adjusted accordingly.
2680 As a special case, return error_mark_node when constructor
2681 is not explicitly available, but it is known to be zero
2682 such as 'static const int a;'. */
2684 get_base_constructor (tree base
, HOST_WIDE_INT
*bit_offset
,
2685 tree (*valueize
)(tree
))
2687 HOST_WIDE_INT bit_offset2
, size
, max_size
;
2688 if (TREE_CODE (base
) == MEM_REF
)
2690 if (!integer_zerop (TREE_OPERAND (base
, 1)))
2692 if (!host_integerp (TREE_OPERAND (base
, 1), 0))
2694 *bit_offset
+= (mem_ref_offset (base
).low
2699 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
2700 base
= valueize (TREE_OPERAND (base
, 0));
2701 if (!base
|| TREE_CODE (base
) != ADDR_EXPR
)
2703 base
= TREE_OPERAND (base
, 0);
2706 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2707 DECL_INITIAL. If BASE is a nested reference into another
2708 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2709 the inner reference. */
2710 switch (TREE_CODE (base
))
2715 tree init
= ctor_for_folding (base
);
2717 /* Our semantic is exact opposite of ctor_for_folding;
2718 NULL means unknown, while error_mark_node is 0. */
2719 if (init
== error_mark_node
)
2722 return error_mark_node
;
2728 base
= get_ref_base_and_extent (base
, &bit_offset2
, &size
, &max_size
);
2729 if (max_size
== -1 || size
!= max_size
)
2731 *bit_offset
+= bit_offset2
;
2732 return get_base_constructor (base
, bit_offset
, valueize
);
2743 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2744 to the memory at bit OFFSET.
2746 We do only simple job of folding byte accesses. */
2749 fold_string_cst_ctor_reference (tree type
, tree ctor
,
2750 unsigned HOST_WIDE_INT offset
,
2751 unsigned HOST_WIDE_INT size
)
2753 if (INTEGRAL_TYPE_P (type
)
2754 && (TYPE_MODE (type
)
2755 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
))))
2756 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
))))
2758 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
)))) == 1
2759 && size
== BITS_PER_UNIT
2760 && !(offset
% BITS_PER_UNIT
))
2762 offset
/= BITS_PER_UNIT
;
2763 if (offset
< (unsigned HOST_WIDE_INT
) TREE_STRING_LENGTH (ctor
))
2764 return build_int_cst_type (type
, (TREE_STRING_POINTER (ctor
)
2767 const char a[20]="hello";
2770 might lead to offset greater than string length. In this case we
2771 know value is either initialized to 0 or out of bounds. Return 0
2773 return build_zero_cst (type
);
2778 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2779 SIZE to the memory at bit OFFSET. */
2782 fold_array_ctor_reference (tree type
, tree ctor
,
2783 unsigned HOST_WIDE_INT offset
,
2784 unsigned HOST_WIDE_INT size
,
2787 unsigned HOST_WIDE_INT cnt
;
2789 double_int low_bound
, elt_size
;
2790 double_int index
, max_index
;
2791 double_int access_index
;
2792 tree domain_type
= NULL_TREE
, index_type
= NULL_TREE
;
2793 HOST_WIDE_INT inner_offset
;
2795 /* Compute low bound and elt size. */
2796 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
2797 domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
2798 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
2800 /* Static constructors for variably sized objects makes no sense. */
2801 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) == INTEGER_CST
);
2802 index_type
= TREE_TYPE (TYPE_MIN_VALUE (domain_type
));
2803 low_bound
= tree_to_double_int (TYPE_MIN_VALUE (domain_type
));
2806 low_bound
= double_int_zero
;
2807 /* Static constructors for variably sized objects makes no sense. */
2808 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))))
2811 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))));
2814 /* We can handle only constantly sized accesses that are known to not
2815 be larger than size of array element. */
2816 if (!TYPE_SIZE_UNIT (type
)
2817 || TREE_CODE (TYPE_SIZE_UNIT (type
)) != INTEGER_CST
2818 || elt_size
.slt (tree_to_double_int (TYPE_SIZE_UNIT (type
))))
2821 /* Compute the array index we look for. */
2822 access_index
= double_int::from_uhwi (offset
/ BITS_PER_UNIT
)
2823 .udiv (elt_size
, TRUNC_DIV_EXPR
);
2824 access_index
+= low_bound
;
2826 access_index
= access_index
.ext (TYPE_PRECISION (index_type
),
2827 TYPE_UNSIGNED (index_type
));
2829 /* And offset within the access. */
2830 inner_offset
= offset
% (elt_size
.to_uhwi () * BITS_PER_UNIT
);
2832 /* See if the array field is large enough to span whole access. We do not
2833 care to fold accesses spanning multiple array indexes. */
2834 if (inner_offset
+ size
> elt_size
.to_uhwi () * BITS_PER_UNIT
)
2837 index
= low_bound
- double_int_one
;
2839 index
= index
.ext (TYPE_PRECISION (index_type
), TYPE_UNSIGNED (index_type
));
2841 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
, cval
)
2843 /* Array constructor might explicitely set index, or specify range
2844 or leave index NULL meaning that it is next index after previous
2848 if (TREE_CODE (cfield
) == INTEGER_CST
)
2849 max_index
= index
= tree_to_double_int (cfield
);
2852 gcc_assert (TREE_CODE (cfield
) == RANGE_EXPR
);
2853 index
= tree_to_double_int (TREE_OPERAND (cfield
, 0));
2854 max_index
= tree_to_double_int (TREE_OPERAND (cfield
, 1));
2859 index
+= double_int_one
;
2861 index
= index
.ext (TYPE_PRECISION (index_type
),
2862 TYPE_UNSIGNED (index_type
));
2866 /* Do we have match? */
2867 if (access_index
.cmp (index
, 1) >= 0
2868 && access_index
.cmp (max_index
, 1) <= 0)
2869 return fold_ctor_reference (type
, cval
, inner_offset
, size
,
2872 /* When memory is not explicitely mentioned in constructor,
2873 it is 0 (or out of range). */
2874 return build_zero_cst (type
);
2877 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2878 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2881 fold_nonarray_ctor_reference (tree type
, tree ctor
,
2882 unsigned HOST_WIDE_INT offset
,
2883 unsigned HOST_WIDE_INT size
,
2886 unsigned HOST_WIDE_INT cnt
;
2889 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
,
2892 tree byte_offset
= DECL_FIELD_OFFSET (cfield
);
2893 tree field_offset
= DECL_FIELD_BIT_OFFSET (cfield
);
2894 tree field_size
= DECL_SIZE (cfield
);
2895 double_int bitoffset
;
2896 double_int byte_offset_cst
= tree_to_double_int (byte_offset
);
2897 double_int bits_per_unit_cst
= double_int::from_uhwi (BITS_PER_UNIT
);
2898 double_int bitoffset_end
, access_end
;
2900 /* Variable sized objects in static constructors makes no sense,
2901 but field_size can be NULL for flexible array members. */
2902 gcc_assert (TREE_CODE (field_offset
) == INTEGER_CST
2903 && TREE_CODE (byte_offset
) == INTEGER_CST
2904 && (field_size
!= NULL_TREE
2905 ? TREE_CODE (field_size
) == INTEGER_CST
2906 : TREE_CODE (TREE_TYPE (cfield
)) == ARRAY_TYPE
));
2908 /* Compute bit offset of the field. */
2909 bitoffset
= tree_to_double_int (field_offset
)
2910 + byte_offset_cst
* bits_per_unit_cst
;
2911 /* Compute bit offset where the field ends. */
2912 if (field_size
!= NULL_TREE
)
2913 bitoffset_end
= bitoffset
+ tree_to_double_int (field_size
);
2915 bitoffset_end
= double_int_zero
;
2917 access_end
= double_int::from_uhwi (offset
)
2918 + double_int::from_uhwi (size
);
2920 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2921 [BITOFFSET, BITOFFSET_END)? */
2922 if (access_end
.cmp (bitoffset
, 0) > 0
2923 && (field_size
== NULL_TREE
2924 || double_int::from_uhwi (offset
).slt (bitoffset_end
)))
2926 double_int inner_offset
= double_int::from_uhwi (offset
) - bitoffset
;
2927 /* We do have overlap. Now see if field is large enough to
2928 cover the access. Give up for accesses spanning multiple
2930 if (access_end
.cmp (bitoffset_end
, 0) > 0)
2932 if (double_int::from_uhwi (offset
).slt (bitoffset
))
2934 return fold_ctor_reference (type
, cval
,
2935 inner_offset
.to_uhwi (), size
,
2939 /* When memory is not explicitely mentioned in constructor, it is 0. */
2940 return build_zero_cst (type
);
2943 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2944 to the memory at bit OFFSET. */
2947 fold_ctor_reference (tree type
, tree ctor
, unsigned HOST_WIDE_INT offset
,
2948 unsigned HOST_WIDE_INT size
, tree from_decl
)
2952 /* We found the field with exact match. */
2953 if (useless_type_conversion_p (type
, TREE_TYPE (ctor
))
2955 return canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
2957 /* We are at the end of walk, see if we can view convert the
2959 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor
)) && !offset
2960 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2961 && operand_equal_p (TYPE_SIZE (type
),
2962 TYPE_SIZE (TREE_TYPE (ctor
)), 0))
2964 ret
= canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
2965 ret
= fold_unary (VIEW_CONVERT_EXPR
, type
, ret
);
2970 if (TREE_CODE (ctor
) == STRING_CST
)
2971 return fold_string_cst_ctor_reference (type
, ctor
, offset
, size
);
2972 if (TREE_CODE (ctor
) == CONSTRUCTOR
)
2975 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
2976 || TREE_CODE (TREE_TYPE (ctor
)) == VECTOR_TYPE
)
2977 return fold_array_ctor_reference (type
, ctor
, offset
, size
,
2980 return fold_nonarray_ctor_reference (type
, ctor
, offset
, size
,
2987 /* Return the tree representing the element referenced by T if T is an
2988 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
2989 names using VALUEIZE. Return NULL_TREE otherwise. */
2992 fold_const_aggregate_ref_1 (tree t
, tree (*valueize
) (tree
))
2994 tree ctor
, idx
, base
;
2995 HOST_WIDE_INT offset
, size
, max_size
;
2998 if (TREE_THIS_VOLATILE (t
))
3001 if (TREE_CODE_CLASS (TREE_CODE (t
)) == tcc_declaration
)
3002 return get_symbol_constant_value (t
);
3004 tem
= fold_read_from_constant_string (t
);
3008 switch (TREE_CODE (t
))
3011 case ARRAY_RANGE_REF
:
3012 /* Constant indexes are handled well by get_base_constructor.
3013 Only special case variable offsets.
3014 FIXME: This code can't handle nested references with variable indexes
3015 (they will be handled only by iteration of ccp). Perhaps we can bring
3016 get_ref_base_and_extent here and make it use a valueize callback. */
3017 if (TREE_CODE (TREE_OPERAND (t
, 1)) == SSA_NAME
3019 && (idx
= (*valueize
) (TREE_OPERAND (t
, 1)))
3020 && TREE_CODE (idx
) == INTEGER_CST
)
3022 tree low_bound
, unit_size
;
3025 /* If the resulting bit-offset is constant, track it. */
3026 if ((low_bound
= array_ref_low_bound (t
),
3027 TREE_CODE (low_bound
) == INTEGER_CST
)
3028 && (unit_size
= array_ref_element_size (t
),
3029 host_integerp (unit_size
, 1))
3030 && (doffset
= (TREE_INT_CST (idx
) - TREE_INT_CST (low_bound
))
3031 .sext (TYPE_PRECISION (TREE_TYPE (idx
))),
3032 doffset
.fits_shwi ()))
3034 offset
= doffset
.to_shwi ();
3035 offset
*= TREE_INT_CST_LOW (unit_size
);
3036 offset
*= BITS_PER_UNIT
;
3038 base
= TREE_OPERAND (t
, 0);
3039 ctor
= get_base_constructor (base
, &offset
, valueize
);
3040 /* Empty constructor. Always fold to 0. */
3041 if (ctor
== error_mark_node
)
3042 return build_zero_cst (TREE_TYPE (t
));
3043 /* Out of bound array access. Value is undefined,
3047 /* We can not determine ctor. */
3050 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
,
3051 TREE_INT_CST_LOW (unit_size
)
3060 case TARGET_MEM_REF
:
3062 base
= get_ref_base_and_extent (t
, &offset
, &size
, &max_size
);
3063 ctor
= get_base_constructor (base
, &offset
, valueize
);
3065 /* Empty constructor. Always fold to 0. */
3066 if (ctor
== error_mark_node
)
3067 return build_zero_cst (TREE_TYPE (t
));
3068 /* We do not know precise address. */
3069 if (max_size
== -1 || max_size
!= size
)
3071 /* We can not determine ctor. */
3075 /* Out of bound array access. Value is undefined, but don't fold. */
3079 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
, size
,
3085 tree c
= fold_const_aggregate_ref_1 (TREE_OPERAND (t
, 0), valueize
);
3086 if (c
&& TREE_CODE (c
) == COMPLEX_CST
)
3087 return fold_build1_loc (EXPR_LOCATION (t
),
3088 TREE_CODE (t
), TREE_TYPE (t
), c
);
3100 fold_const_aggregate_ref (tree t
)
3102 return fold_const_aggregate_ref_1 (t
, NULL
);
3105 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3106 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3107 KNOWN_BINFO carries the binfo describing the true type of
3108 OBJ_TYPE_REF_OBJECT(REF). */
3111 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token
, tree known_binfo
)
3113 unsigned HOST_WIDE_INT offset
, size
;
3114 tree v
, fn
, vtable
, init
;
3116 vtable
= v
= BINFO_VTABLE (known_binfo
);
3117 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
3121 if (TREE_CODE (v
) == POINTER_PLUS_EXPR
)
3123 offset
= tree_low_cst (TREE_OPERAND (v
, 1), 1) * BITS_PER_UNIT
;
3124 v
= TREE_OPERAND (v
, 0);
3129 if (TREE_CODE (v
) != ADDR_EXPR
)
3131 v
= TREE_OPERAND (v
, 0);
3133 if (TREE_CODE (v
) != VAR_DECL
3134 || !DECL_VIRTUAL_P (v
))
3136 init
= ctor_for_folding (v
);
3138 /* The virtual tables should always be born with constructors.
3139 and we always should assume that they are avaialble for
3140 folding. At the moment we do not stream them in all cases,
3141 but it should never happen that ctor seem unreachable. */
3143 if (init
== error_mark_node
)
3145 gcc_assert (in_lto_p
);
3148 gcc_checking_assert (TREE_CODE (TREE_TYPE (v
)) == ARRAY_TYPE
);
3149 size
= tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v
))), 1);
3150 offset
+= token
* size
;
3151 fn
= fold_ctor_reference (TREE_TYPE (TREE_TYPE (v
)), init
,
3153 if (!fn
|| integer_zerop (fn
))
3155 gcc_assert (TREE_CODE (fn
) == ADDR_EXPR
3156 || TREE_CODE (fn
) == FDESC_EXPR
);
3157 fn
= TREE_OPERAND (fn
, 0);
3158 gcc_assert (TREE_CODE (fn
) == FUNCTION_DECL
);
3160 /* When cgraph node is missing and function is not public, we cannot
3161 devirtualize. This can happen in WHOPR when the actual method
3162 ends up in other partition, because we found devirtualization
3163 possibility too late. */
3164 if (!can_refer_decl_in_current_unit_p (fn
, vtable
))
3167 /* Make sure we create a cgraph node for functions we'll reference.
3168 They can be non-existent if the reference comes from an entry
3169 of an external vtable for example. */
3170 cgraph_get_create_node (fn
);
3175 /* Return true iff VAL is a gimple expression that is known to be
3176 non-negative. Restricted to floating-point inputs. */
3179 gimple_val_nonnegative_real_p (tree val
)
3183 gcc_assert (val
&& SCALAR_FLOAT_TYPE_P (TREE_TYPE (val
)));
3185 /* Use existing logic for non-gimple trees. */
3186 if (tree_expr_nonnegative_p (val
))
3189 if (TREE_CODE (val
) != SSA_NAME
)
3192 /* Currently we look only at the immediately defining statement
3193 to make this determination, since recursion on defining
3194 statements of operands can lead to quadratic behavior in the
3195 worst case. This is expected to catch almost all occurrences
3196 in practice. It would be possible to implement limited-depth
3197 recursion if important cases are lost. Alternatively, passes
3198 that need this information (such as the pow/powi lowering code
3199 in the cse_sincos pass) could be revised to provide it through
3200 dataflow propagation. */
3202 def_stmt
= SSA_NAME_DEF_STMT (val
);
3204 if (is_gimple_assign (def_stmt
))
3208 /* See fold-const.c:tree_expr_nonnegative_p for additional
3209 cases that could be handled with recursion. */
3211 switch (gimple_assign_rhs_code (def_stmt
))
3214 /* Always true for floating-point operands. */
3218 /* True if the two operands are identical (since we are
3219 restricted to floating-point inputs). */
3220 op0
= gimple_assign_rhs1 (def_stmt
);
3221 op1
= gimple_assign_rhs2 (def_stmt
);
3224 || operand_equal_p (op0
, op1
, 0))
3231 else if (is_gimple_call (def_stmt
))
3233 tree fndecl
= gimple_call_fndecl (def_stmt
);
3235 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3239 switch (DECL_FUNCTION_CODE (fndecl
))
3241 CASE_FLT_FN (BUILT_IN_ACOS
):
3242 CASE_FLT_FN (BUILT_IN_ACOSH
):
3243 CASE_FLT_FN (BUILT_IN_CABS
):
3244 CASE_FLT_FN (BUILT_IN_COSH
):
3245 CASE_FLT_FN (BUILT_IN_ERFC
):
3246 CASE_FLT_FN (BUILT_IN_EXP
):
3247 CASE_FLT_FN (BUILT_IN_EXP10
):
3248 CASE_FLT_FN (BUILT_IN_EXP2
):
3249 CASE_FLT_FN (BUILT_IN_FABS
):
3250 CASE_FLT_FN (BUILT_IN_FDIM
):
3251 CASE_FLT_FN (BUILT_IN_HYPOT
):
3252 CASE_FLT_FN (BUILT_IN_POW10
):
3255 CASE_FLT_FN (BUILT_IN_SQRT
):
3256 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3257 nonnegative inputs. */
3258 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val
))))
3263 CASE_FLT_FN (BUILT_IN_POWI
):
3264 /* True if the second argument is an even integer. */
3265 arg1
= gimple_call_arg (def_stmt
, 1);
3267 if (TREE_CODE (arg1
) == INTEGER_CST
3268 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
3273 CASE_FLT_FN (BUILT_IN_POW
):
3274 /* True if the second argument is an even integer-valued
3276 arg1
= gimple_call_arg (def_stmt
, 1);
3278 if (TREE_CODE (arg1
) == REAL_CST
)
3283 c
= TREE_REAL_CST (arg1
);
3284 n
= real_to_integer (&c
);
3288 REAL_VALUE_TYPE cint
;
3289 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
3290 if (real_identical (&c
, &cint
))