/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2013 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable that
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */
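/* For illustration (a hypothetical case, not from these sources): when
   folding a load from an external C++ vtable, e.g.

     struct B { virtual int f (); };   // key method defined in another unit

   the vtable entry for B::f may name a method whose body will never be
   emitted in the current unit; the predicate below is what stops the
   folders from materializing a direct reference to it.  */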
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  struct cgraph_node *node;

  if (DECL_ABSTRACT (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      snode = symtab_get_node (decl);
      node = dyn_cast <cgraph_node> (snode);
      return !node || !node->global.inlined_to;
    }
  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || !DECL_EXTERNAL (from_decl)
      || (flag_ltrans
	  && symtab_get_node (from_decl)->in_other_partition))
    return true;

  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && (!(snode = symtab_get_node (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we can always introduce a new reference.
     Exceptions are the COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;

  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also when still gimplifying all referred comdat functions will be
     output.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with
     LTO it is possible that the vtable was kept public while the function
     itself was privatized.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;
  /* OK, we are seeing either a COMDAT or a static variable.  In this case we
     must check that the definition is still around so we can refer to it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have function body and that we didn't take
	 the decision to eliminate the offline copy of the function yet.
	 The second is important when devirtualization happens during final
	 compilation stage when making a new reference no longer makes callee
	 to be compiled.  */
      if (!node || !node->definition || node->global.inlined_to)
	{
	  gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
	  return false;
	}
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->definition)
	{
	  gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
	  return false;
	}
    }
  return true;
}
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor contains
   CVAL.  */
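/* A sketch of the intended transformation (hypothetical values): an
   initializer value of the form

     q + 4		<-- POINTER_PLUS_EXPR (q, 4)

   is rewritten into the address-of-memory-reference form

     &MEM_REF[q, 4]

   which is_gimple_min_invariant accepts as a constant address.  */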
tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;

  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_get_create_node (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */
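/* For example (hypothetical declarations):

     static const int forty_two = 42;	// yields the INTEGER_CST 42
     static const int zeroed;		// no initializer; yields a zero
					// constant if it may not be
					// overridden at link or run time

   Non-constant or externally overridable symbols yield NULL_TREE.  */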
tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      val = canonicalize_constructor_val (unshare_expr (val), sym);
      if (val && is_gimple_min_invariant (val))
	return val;

      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */
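/* A sketch of the kind of rewrite performed here (hypothetical GIMPLE):
   after constant address propagation we may see

     tmp_1 = MEM[(int *) &x, 0];

   which folds back to the plain reference

     tmp_1 = x;

   provided the volatility and type compatibility checks below succeed.  */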
static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */
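/* For example (hypothetical statement): for

     x_1 = 2 + 3;

   the GIMPLE_BINARY_RHS case below folds the rhs to the constant 5 and
   returns it as the replacement rhs; NULL_TREE is returned when nothing
   simpler is found.  */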
static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (val))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (val, &final);
		if (final && targets.length () <= 1)
		  {
		    tree fndecl;
		    if (targets.length () == 1)
		      fndecl = targets[0]->decl;
		    else
		      fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		    val = fold_convert (TREE_TYPE (val), fndecl);
		    STRIP_USELESS_TYPE_CONVERSION (val);
		    return val;
		  }
	      }
	  }
	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }
	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }
	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
      }
      break;
    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X
		 X == 1 and X != 0 is X  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));
	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;
    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */
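/* For example (hypothetical condition): after propagation a condition like

     if (7 < 3)

   folds to a constant and the statement becomes "if (0)"; the cfg itself
   is cleaned up by later passes, not here.  */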
static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */
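/* For example (a hypothetical folding, for illustration only): when a call

     memcpy (dst, src, 8);

   is folded by the builtin folders into an equivalent assignment
   expression, this routine gimplifies that expression, threads the
   VUSE/VDEF links through any new stores, and replaces the GIMPLE_CALL.  */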
void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;
  gimple laststore;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  laststore = NULL;
  for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
    {
      new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  reaching_vuse = gimple_vuse (stmt);
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
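/* For example (hypothetical SSA graph): for

     s_1 = PHI <"ab", "cdef">
     n_2 = strlen (s_1)

   a query on s_1 with TYPE == 1 computes the maximum string length 4,
   while TYPE == 0 fails because the two possible lengths differ.  */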
static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_maxval_strlen (rhs, length, visited, type);
	  }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    tree op2 = gimple_assign_rhs2 (def_stmt);
	    tree op3 = gimple_assign_rhs3 (def_stmt);
	    return get_maxval_strlen (op2, length, visited, type)
		   && get_maxval_strlen (op3, length, visited, type);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }
	}
	return true;

      default:
	return false;
    }
}
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */
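/* For example (hypothetical call): with the string length of src known
   to be 3, a call

     strcpy (dst, src);

   can be folded to "memcpy (dst, src, 4)", i.e. another call with
   different arguments, hence the caveat above.  */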
static tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* Give up for always_inline inline builtins until they are
     folded manually.  */
  if (avoid_folding_inline_builtin (callee))
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we exit early.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val
	    = fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2], ignore,
					   DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors).
   EXPECTED_TYPE is the type of the class the virtual method belongs to.  */
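/* For example (hypothetical C++):

     struct A { virtual int f (); };
     static A a;	// statically allocated, no base classes

   an ADDR_EXPR of "a" lets us look up A's binfo, which later allows
   resolving OBJ_TYPE_REF calls on &a directly to A::f.  */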
tree
gimple_extract_devirt_binfo_from_cst (tree cst, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, binfo;
  bool last_artificial = false;

  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  if (!DECL_P (base)
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      if (types_same_for_odr (type, expected_type))
	break;

      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL)
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_to_uhwi (DECL_SIZE (fld));
	  if (pos <= offset && (pos + size) > offset)
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      offset -= pos;
    }
  /* Artificial sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;
  return binfo;
}
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */
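/* For example (hypothetical GIMPLE): a virtual dispatch such as

     OBJ_TYPE_REF (fn_1; obj_2->0) (obj_2);

   whose OBJ_TYPE_REF_EXPR has been propagated to a known function
   address is rewritten below into a direct call.  */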
static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, cgraph_get_node (gimple_call_addr_fndecl
						(OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, &final);
	  if (final && targets.length () <= 1)
	    {
	      tree fndecl;
	      if (targets.length () == 1)
		fndecl = targets[0]->decl;
	      else
		fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	      gimple_call_set_fndecl (stmt, fndecl);
	      changed = true;
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  callee = gimple_call_fndecl (stmt);
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);
      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
      else if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
	changed |= targetm.gimple_fold_builtin (gsi);
    }

  return changed;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */
static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;

	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }

	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */
bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */
bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */
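/* For example: canonicalize_bool (a_1 < b_2, true) yields the inverted
   comparison "a_1 >= b_2" (for a non-floating comparison), and
   canonicalize_bool (x_3, false) on a non-boolean x_3 yields
   "x_3 != 0", always of boolean type.  */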
static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */
static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */
static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_EXPR if we can't simplify this to a single expression.  */
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */
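/* For example: and_comparisons_1 (LT_EXPR, x, 3, LT_EXPR, x, 5), i.e.
   (x < 3) AND (x < 5), chooses the more restrictive comparison and
   yields "x < 3"; (x < 3) AND (x > 5) yields boolean_false_node.  */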
static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */
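/* For example, a caller combining two guards (hypothetical trees):

     tree t = maybe_fold_and_comparisons (LE_EXPR, n, limit,
					  GE_EXPR, n, limit);

   yields the singleton-range test "n == limit", trying both operand
   orders since and_comparisons_1 is asymmetric.  */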
tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_EXPR if we can't simplify this to a single expression.  */
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */
2049 or_var_with_comparison_1 (gimple stmt
,
2050 enum tree_code code2
, tree op2a
, tree op2b
)
2052 tree var
= gimple_assign_lhs (stmt
);
2053 tree true_test_var
= NULL_TREE
;
2054 tree false_test_var
= NULL_TREE
;
2055 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
2057 /* Check for identities like (var OR (var != 0)) => true . */
2058 if (TREE_CODE (op2a
) == SSA_NAME
2059 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
2061 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
2062 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
2064 true_test_var
= op2a
;
2065 if (var
== true_test_var
)
2068 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
2069 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
2071 false_test_var
= op2a
;
2072 if (var
== false_test_var
)
2073 return boolean_true_node
;
2077 /* If the definition is a comparison, recurse on it. */
2078 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
2080 tree t
= or_comparisons_1 (innercode
,
2081 gimple_assign_rhs1 (stmt
),
2082 gimple_assign_rhs2 (stmt
),
2090 /* If the definition is an AND or OR expression, we may be able to
2091 simplify by reassociating. */
2092 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
2093 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
2095 tree inner1
= gimple_assign_rhs1 (stmt
);
2096 tree inner2
= gimple_assign_rhs2 (stmt
);
2099 tree partial
= NULL_TREE
;
2100 bool is_or
= (innercode
== BIT_IOR_EXPR
);
2102 /* Check for boolean identities that don't require recursive examination
2104 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2105 inner1 OR (inner1 AND inner2) => inner1
2106 !inner1 OR (inner1 OR inner2) => true
2107 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2109 if (inner1
== true_test_var
)
2110 return (is_or
? var
: inner1
);
2111 else if (inner2
== true_test_var
)
2112 return (is_or
? var
: inner2
);
2113 else if (inner1
== false_test_var
)
2116 : or_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
2117 else if (inner2
== false_test_var
)
2120 : or_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
2122 /* Next, redistribute/reassociate the OR across the inner tests.
2123 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2124 if (TREE_CODE (inner1
) == SSA_NAME
2125 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
2126 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2127 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
2128 gimple_assign_rhs1 (s
),
2129 gimple_assign_rhs2 (s
),
2130 code2
, op2a
, op2b
)))
2132 /* Handle the OR case, where we are reassociating:
2133 (inner1 OR inner2) OR (op2a code2 op2b)
2135 If the partial result t is a constant, we win. Otherwise
2136 continue on to try reassociating with the other inner test. */
2139 if (integer_onep (t
))
2140 return boolean_true_node
;
2141 else if (integer_zerop (t
))
2145 /* Handle the AND case, where we are redistributing:
2146 (inner1 AND inner2) OR (op2a code2 op2b)
2147 => (t AND (inner2 OR (op2a code op2b))) */
2148 else if (integer_zerop (t
))
2149 return boolean_false_node
;
2151 /* Save partial result for later. */
2155 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2156 if (TREE_CODE (inner2
) == SSA_NAME
2157 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
2158 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2159 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
2160 gimple_assign_rhs1 (s
),
2161 gimple_assign_rhs2 (s
),
2162 code2
, op2a
, op2b
)))
2164 /* Handle the OR case, where we are reassociating:
2165 (inner1 OR inner2) OR (op2a code2 op2b)
2167 => (t OR partial) */
2170 if (integer_zerop (t
))
2172 else if (integer_onep (t
))
2173 return boolean_true_node
;
2174 /* If both are the same, we can apply the identity
2176 else if (partial
&& same_bool_result_p (t
, partial
))
2180 /* Handle the AND case, where we are redistributing:
2181 (inner1 AND inner2) OR (op2a code2 op2b)
2182 => (t AND (inner1 OR (op2a code2 op2b)))
2183 => (t AND partial) */
2186 if (integer_zerop (t
))
2187 return boolean_false_node
;
2190 /* We already got a simplification for the other
2191 operand to the redistributed AND expression. The
2192 interesting case is when at least one is true.
2193 Or, if both are the same, we can apply the identity
2195 if (integer_onep (partial
))
2197 else if (integer_onep (t
))
2199 else if (same_bool_result_p (t
, partial
))
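/* As an example of the redistribution above: for var = inner1 AND inner2,
   (inner1 AND inner2) OR (op2a code2 op2b) simplifies whenever both
   partial results (inner1 OR ...) and (inner2 OR ...) do; when one of
   them is true, the result collapses to the other.  */
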
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1, code2,
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1,
                                    swap_tree_comparison (code2),
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
         return that or TRUE, depending on whether the constant op1b
         also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code1, boolean_type_node, op1a, op1b);
            }
        }
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
            }
        }

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: val = false;
            }
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      else if (code2 == EQ_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Check for singleton ranges.  */
      else if (cmp == 0
               && ((code1 == LT_EXPR && code2 == GT_EXPR)
                   || (code1 == GT_EXPR && code2 == LT_EXPR)))
        return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_true_node;
      else if (cmp <= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ORed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     we're still OK.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
                        {
                          if (!result)
                            result = boolean_true_node;
                          else if (!integer_onep (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME
                           && !SSA_NAME_IS_DEFAULT_DEF (arg))
                    {
                      tree temp;
                      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
                      /* In simple cases we can look through PHI nodes,
                         but we have to be careful with loops.  */
                      if (! dom_info_available_p (CDI_DOMINATORS)
                          || gimple_bb (def_stmt) == gimple_bb (stmt)
                          || dominated_by_p (CDI_DOMINATORS,
                                             gimple_bb (def_stmt),
                                             gimple_bb (stmt)))
                        return NULL_TREE;
                      temp = or_var_with_comparison (arg, invert, code2,
                                                     op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }
          break;

        default:
          break;
        }
    }
  return NULL_TREE;
}

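/* For instance, (x < 2) OR (x > 2) is recognized above as a singleton
   range and folds to x != 2, while (x < 5) OR (x >= 3) imposes no
   restriction at all and folds to boolean_true_node.  */
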
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}

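/* Since or_comparisons_1 only follows SSA definitions reachable through
   its first comparison, trying both argument orders here catches the
   mirror-image cases as well.  */
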
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Returns either NULL_TREE, a simplified but non-constant expression,
   or a constant.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return (*valueize) (rhs);
                }
              /* Handle propagating invariant addresses into address
                 operations.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR
                       && !is_gimple_min_invariant (rhs))
                {
                  HOST_WIDE_INT offset = 0;
                  tree base;
                  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
                                                          &offset,
                                                          valueize);
                  if (base
                      && (CONSTANT_CLASS_P (base)
                          || decl_address_invariant_p (base)))
                    return build_invariant_address (TREE_TYPE (rhs),
                                                    base, offset);
                }
              else if (TREE_CODE (rhs) == CONSTRUCTOR
                       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                       && (CONSTRUCTOR_NELTS (rhs)
                           == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
                {
                  unsigned i;
                  tree val, *vec;

                  vec = XALLOCAVEC (tree,
                                    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
                    {
                      val = (*valueize) (val);
                      if (TREE_CODE (val) == INTEGER_CST
                          || TREE_CODE (val) == REAL_CST
                          || TREE_CODE (val) == FIXED_CST)
                        vec[i] = val;
                      else
                        return NULL_TREE;
                    }

                  return build_vector (TREE_TYPE (rhs), vec);
                }
              if (subcode == OBJ_TYPE_REF)
                {
                  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
                  /* If callee is constant, we can fold away the wrapper.  */
                  if (is_gimple_min_invariant (val))
                    return val;
                }

              if (kind == tcc_reference)
                {
                  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                       || TREE_CODE (rhs) == REALPART_EXPR
                       || TREE_CODE (rhs) == IMAGPART_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_unary_loc (EXPR_LOCATION (rhs),
                                             TREE_CODE (rhs),
                                             TREE_TYPE (rhs), val);
                    }
                  else if (TREE_CODE (rhs) == BIT_FIELD_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_ternary_loc (EXPR_LOCATION (rhs),
                                               TREE_CODE (rhs),
                                               TREE_TYPE (rhs), val,
                                               TREE_OPERAND (rhs, 1),
                                               TREE_OPERAND (rhs, 2));
                    }
                  else if (TREE_CODE (rhs) == MEM_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      if (TREE_CODE (val) == ADDR_EXPR
                          && is_gimple_min_invariant (val))
                        {
                          tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
                                                  unshare_expr (val),
                                                  TREE_OPERAND (rhs, 1));
                          if (tem)
                            rhs = tem;
                        }
                    }
                  return fold_const_aggregate_ref_1 (rhs, valueize);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            {
              /* Handle unary operators that can appear in GIMPLE form.
                 Note that we know the single operand must be a constant,
                 so this should almost always return a simplified RHS.  */
              tree lhs = gimple_assign_lhs (stmt);
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));

              /* Conversions are useless for CCP purposes if they are
                 value-preserving.  Thus the restrictions that
                 useless_type_conversion_p places for restrict qualification
                 of pointer types should not apply here.
                 Substitution later will only substitute to allowed places.  */
              if (CONVERT_EXPR_CODE_P (subcode)
                  && POINTER_TYPE_P (TREE_TYPE (lhs))
                  && POINTER_TYPE_P (TREE_TYPE (op0))
                  && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
                     == TYPE_ADDR_SPACE (TREE_TYPE (op0))
                  && TYPE_MODE (TREE_TYPE (lhs))
                     == TYPE_MODE (TREE_TYPE (op0)))
                return op0;

              return
                fold_unary_ignore_overflow_loc (loc, subcode,
                                                gimple_expr_type (stmt), op0);
            }

          case GIMPLE_BINARY_RHS:
            {
              /* Handle binary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

              /* Translate &x + CST into an invariant form suitable for
                 further propagation.  */
              if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                  && TREE_CODE (op0) == ADDR_EXPR
                  && TREE_CODE (op1) == INTEGER_CST)
                {
                  tree off = fold_convert (ptr_type_node, op1);
                  return build_fold_addr_expr_loc
                           (loc,
                            fold_build2 (MEM_REF,
                                         TREE_TYPE (TREE_TYPE (op0)),
                                         unshare_expr (op0), off));
                }

              return fold_binary_loc (loc, subcode,
                                      gimple_expr_type (stmt), op0, op1);
            }

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

              /* Fold embedded expressions in ternary codes.  */
              if ((subcode == COND_EXPR
                   || subcode == VEC_COND_EXPR)
                  && COMPARISON_CLASS_P (op0))
                {
                  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
                  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
                  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
                                              TREE_TYPE (op0), op00, op01);
                  if (tem)
                    op0 = tem;
                }

              return fold_ternary_loc (loc, subcode,
                                       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
        tree fn;

        if (gimple_call_internal_p (stmt))
          {
            enum tree_code subcode = ERROR_MARK;
            switch (gimple_call_internal_fn (stmt))
              {
              case IFN_UBSAN_CHECK_ADD:
                subcode = PLUS_EXPR;
                break;
              case IFN_UBSAN_CHECK_SUB:
                subcode = MINUS_EXPR;
                break;
              case IFN_UBSAN_CHECK_MUL:
                subcode = MULT_EXPR;
                break;
              default:
                return NULL_TREE;
              }
            tree op0 = (*valueize) (gimple_call_arg (stmt, 0));
            tree op1 = (*valueize) (gimple_call_arg (stmt, 1));

            if (TREE_CODE (op0) != INTEGER_CST
                || TREE_CODE (op1) != INTEGER_CST)
              return NULL_TREE;
            tree res = fold_binary_loc (loc, subcode,
                                        TREE_TYPE (gimple_call_arg (stmt, 0)),
                                        op0, op1);
            if (res
                && TREE_CODE (res) == INTEGER_CST
                && !TREE_OVERFLOW (res))
              return res;
            return NULL_TREE;
          }

        fn = (*valueize) (gimple_call_fn (stmt));
        if (TREE_CODE (fn) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
          {
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree call, retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              args[i] = (*valueize) (gimple_call_arg (stmt, i));
            call = build_call_array_loc (loc,
                                         gimple_call_return_type (stmt),
                                         fn, gimple_call_num_args (stmt), args);
            retval = fold_call_expr (EXPR_LOCATION (call), call, false);
            if (retval)
              /* fold_call_expr wraps the result inside a NOP_EXPR.  */
              STRIP_NOPS (retval);
            return retval;
          }
        return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}

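/* A typical use is CCP-style propagation: with a valueize callback that
   maps SSA names to their lattice values, an assignment such as
   _3 = _1 + _2 folds to an INTEGER_CST whenever both operands valueize
   to constants.  */
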
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}

/* The following set of functions are supposed to fold references using
   their constant initializers.  */

static tree fold_ctor_reference (tree type, tree ctor,
                                 unsigned HOST_WIDE_INT offset,
                                 unsigned HOST_WIDE_INT size, tree);

/* See if we can find a constructor defining the value of BASE.
   When the constructor is found within an outer object at a constant
   offset (such as BASE being array[40] with a known constructor for
   array), BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when the constructor
   is not explicitly available but is known to be zero,
   such as for 'static const int a;'.  */

static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
                      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
        {
          if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
            return NULL_TREE;
          *bit_offset += (mem_ref_offset (base).low
                          * BITS_PER_UNIT);
        }

      if (valueize
          && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
        base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
        tree init = ctor_for_folding (base);

        /* Our semantic is exact opposite of ctor_for_folding;
           NULL means unknown, while error_mark_node is 0.  */
        if (init == error_mark_node)
          return NULL_TREE;
        if (!init)
          return error_mark_node;
        return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
        return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}

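/* E.g. for a load from a[10] where 'static const int a[20]' has a known
   initializer, this returns the CONSTRUCTOR of 'a' with BIT_OFFSET
   advanced by 10 * 32 bits on a target with 32-bit int; for a variable
   known to be zero-initialized it returns error_mark_node instead.  */
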
/* CTOR is STRING_CST.  Fold a reference of type TYPE and size SIZE
   to the memory at bit OFFSET.

   We only do the simple job of folding byte accesses.  */

static tree
fold_string_cst_ctor_reference (tree type, tree ctor,
                                unsigned HOST_WIDE_INT offset,
                                unsigned HOST_WIDE_INT size)
{
  if (INTEGRAL_TYPE_P (type)
      && (TYPE_MODE (type)
          == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
          == MODE_INT)
      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
      && size == BITS_PER_UNIT
      && !(offset % BITS_PER_UNIT))
    {
      offset /= BITS_PER_UNIT;
      if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
        return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
                                          [offset]));
      /* Folding
           const char a[20]="hello";
           return a[10];

         might lead to offset greater than string length.  In this case we
         know value is either initialized to 0 or out of bounds.  Return 0
         in both cases.  */
      return build_zero_cst (type);
    }
  return NULL_TREE;
}

/* CTOR is CONSTRUCTOR of an array type.  Fold a reference of type TYPE and
   size SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
                           unsigned HOST_WIDE_INT offset,
                           unsigned HOST_WIDE_INT size,
                           tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  double_int low_bound, elt_size;
  double_int index, max_index;
  double_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = double_int_zero;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
              == INTEGER_CST);
  elt_size =
    tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than the size of an array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type))))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
                 .udiv (elt_size, TRUNC_DIV_EXPR);
  access_index += low_bound;
  if (index_type)
    access_index = access_index.ext (TYPE_PRECISION (index_type),
                                     TYPE_UNSIGNED (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span the whole access.  We do
     not care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  index = low_bound - double_int_one;
  if (index_type)
    index = index.ext (TYPE_PRECISION (index_type),
                       TYPE_UNSIGNED (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* An array constructor might explicitly set the index, specify a
         range, or leave the index NULL meaning that it is the next index
         after the previous one.  */
      if (cfield)
        {
          if (TREE_CODE (cfield) == INTEGER_CST)
            max_index = index = tree_to_double_int (cfield);
          else
            {
              gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
              index = tree_to_double_int (TREE_OPERAND (cfield, 0));
              max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
            }
        }
      else
        {
          index += double_int_one;
          if (index_type)
            index = index.ext (TYPE_PRECISION (index_type),
                               TYPE_UNSIGNED (index_type));
          max_index = index;
        }

      /* Do we have a match?  */
      if (access_index.cmp (index, 1) >= 0
          && access_index.cmp (max_index, 1) <= 0)
        return fold_ctor_reference (type, cval, inner_offset, size,
                                    from_decl);
    }
  /* When memory is not explicitly mentioned in the constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}

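/* Example of the index computation above: an access at bit offset 96
   with 4-byte elements and a zero low bound gives, assuming
   BITS_PER_UNIT is 8, access_index = (96 / 8) / 4 = 3 and
   inner_offset = 0, so element 3 of the constructor is consulted.  */
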
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold a reference of type TYPE and size SIZE to the memory at bit
   OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
                              unsigned HOST_WIDE_INT offset,
                              unsigned HOST_WIDE_INT size,
                              tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
                            cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      double_int bitoffset;
      double_int byte_offset_cst = tree_to_double_int (byte_offset);
      double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
      double_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
         but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
                  && TREE_CODE (byte_offset) == INTEGER_CST
                  && (field_size != NULL_TREE
                      ? TREE_CODE (field_size) == INTEGER_CST
                      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = tree_to_double_int (field_offset)
                  + byte_offset_cst * bits_per_unit_cst;
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
        bitoffset_end = bitoffset + tree_to_double_int (field_size);
      else
        bitoffset_end = double_int_zero;

      access_end = double_int::from_uhwi (offset)
                   + double_int::from_uhwi (size);

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
         [BITOFFSET, BITOFFSET_END)?  */
      if (access_end.cmp (bitoffset, 0) > 0
          && (field_size == NULL_TREE
              || double_int::from_uhwi (offset).slt (bitoffset_end)))
        {
          double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
          /* We do have overlap.  Now see if the field is large enough to
             cover the access.  Give up for accesses spanning multiple
             fields.  */
          if (access_end.cmp (bitoffset_end, 0) > 0)
            return NULL_TREE;
          if (double_int::from_uhwi (offset).slt (bitoffset))
            return NULL_TREE;
          return fold_ctor_reference (type, cval,
                                      inner_offset.to_uhwi (), size,
                                      from_decl);
        }
    }
  /* When memory is not explicitly mentioned in the constructor, it is 0.  */
  return build_zero_cst (type);
}

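/* E.g. for a field occupying bits [32, 64) of the constructed object,
   a read of bits [32, 48) is folded from that field's value, whereas a
   read of bits [24, 48) starts before the field and makes us give up.  */
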
/* CTOR is value initializing memory.  Fold a reference of type TYPE and
   size SIZE to the memory at bit OFFSET.  */

static tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
                     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && operand_equal_p (TYPE_SIZE (type),
                          TYPE_SIZE (TREE_TYPE (ctor)), 0))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
        STRIP_NOPS (ret);
      return ret;
    }
  if (TREE_CODE (ctor) == STRING_CST)
    return fold_string_cst_ctor_reference (type, ctor, offset, size);
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
          || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
        return fold_array_ctor_reference (type, ctor, offset, size,
                                          from_decl);
      else
        return fold_nonarray_ctor_reference (type, ctor, offset, size,
                                             from_decl);
    }

  return NULL_TREE;
}

/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
         Only special case variable offsets.
         FIXME: This code can't handle nested references with variable indexes
         (they will be handled only by iteration of ccp).  Perhaps we can bring
         get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
          && valueize
          && (idx = (*valueize) (TREE_OPERAND (t, 1)))
          && TREE_CODE (idx) == INTEGER_CST)
        {
          tree low_bound, unit_size;
          double_int doffset;

          /* If the resulting bit-offset is constant, track it.  */
          if ((low_bound = array_ref_low_bound (t),
               TREE_CODE (low_bound) == INTEGER_CST)
              && (unit_size = array_ref_element_size (t),
                  tree_fits_uhwi_p (unit_size))
              && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
                            .sext (TYPE_PRECISION (TREE_TYPE (idx))),
                  doffset.fits_shwi ()))
            {
              offset = doffset.to_shwi ();
              offset *= tree_to_uhwi (unit_size);
              offset *= BITS_PER_UNIT;

              base = TREE_OPERAND (t, 0);
              ctor = get_base_constructor (base, &offset, valueize);
              /* Empty constructor.  Always fold to 0.  */
              if (ctor == error_mark_node)
                return build_zero_cst (TREE_TYPE (t));
              /* Out of bound array access.  Value is undefined,
                 but don't fold.  */
              if (offset < 0)
                return NULL_TREE;
              /* We cannot determine the ctor.  */
              if (!ctor)
                return NULL_TREE;
              return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
                                          tree_to_uhwi (unit_size)
                                          * BITS_PER_UNIT,
                                          base);
            }
        }
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
        return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
        return NULL_TREE;
      /* We cannot determine the ctor.  */
      if (!ctor)
        return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
        return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
                                  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1_loc (EXPR_LOCATION (t),
                                  TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_const_aggregate_ref_1, but without valueization of SSA names.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}

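/* For instance, given 'static const int a[2] = { 1, 2 };', calling
   fold_const_aggregate_ref on the ARRAY_REF a[1] yields the
   INTEGER_CST 2.  */
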
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
{
  unsigned HOST_WIDE_INT offset, size;
  tree v, fn, vtable, init;

  vtable = v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (TREE_CODE (v) == POINTER_PLUS_EXPR)
    {
      offset = tree_to_uhwi (TREE_OPERAND (v, 1)) * BITS_PER_UNIT;
      v = TREE_OPERAND (v, 0);
    }
  else
    offset = 0;

  if (TREE_CODE (v) != ADDR_EXPR)
    return NULL_TREE;
  v = TREE_OPERAND (v, 0);

  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    return NULL_TREE;
  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors,
     and we should always assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that the ctor seems unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset += token * size;
  fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
                            offset, size, v);
  if (!fn || integer_zerop (fn))
    return NULL_TREE;
  gcc_assert (TREE_CODE (fn) == ADDR_EXPR
              || TREE_CODE (fn) == FDESC_EXPR);
  fn = TREE_OPERAND (fn, 0);
  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);

  /* When the cgraph node is missing and the function is not public, we
     cannot devirtualize.  This can happen in WHOPR when the actual method
     ends up in another partition, because we found the devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (fn, vtable))
    return NULL_TREE;

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_get_create_node (fn);

  return fn;
}

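/* The lookup above is plain array arithmetic: with 64-bit vtable
   entries, TOKEN 3 selects the initializer entry at bit offset
   OFFSET + 3 * 64, which must be the ADDR_EXPR (or FDESC_EXPR) of a
   FUNCTION_DECL.  */
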
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
         cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
        {
        case ABS_EXPR:
          /* Always true for floating-point operands.  */
          return true;

        case MULT_EXPR:
          /* True if the two operands are identical (since we are
             restricted to floating-point inputs).  */
          op0 = gimple_assign_rhs1 (def_stmt);
          op1 = gimple_assign_rhs2 (def_stmt);

          if (op0 == op1
              || operand_equal_p (op0, op1, 0))
            return true;

          /* Fall through.  */

        default:
          break;
        }
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
          && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
        {
          tree arg1;

          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_ACOS):
            CASE_FLT_FN (BUILT_IN_ACOSH):
            CASE_FLT_FN (BUILT_IN_CABS):
            CASE_FLT_FN (BUILT_IN_COSH):
            CASE_FLT_FN (BUILT_IN_ERFC):
            CASE_FLT_FN (BUILT_IN_EXP):
            CASE_FLT_FN (BUILT_IN_EXP10):
            CASE_FLT_FN (BUILT_IN_EXP2):
            CASE_FLT_FN (BUILT_IN_FABS):
            CASE_FLT_FN (BUILT_IN_FDIM):
            CASE_FLT_FN (BUILT_IN_HYPOT):
            CASE_FLT_FN (BUILT_IN_POW10):
              return true;

            CASE_FLT_FN (BUILT_IN_SQRT):
              /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
                 negative inputs.  */
              if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
                return true;

              break;

            CASE_FLT_FN (BUILT_IN_POWI):
              /* True if the second argument is an even integer.  */
              arg1 = gimple_call_arg (def_stmt, 1);

              if (TREE_CODE (arg1) == INTEGER_CST
                  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
                return true;

              break;

            CASE_FLT_FN (BUILT_IN_POW):
              /* True if the second argument is an even integer-valued
                 real.  */
              arg1 = gimple_call_arg (def_stmt, 1);

              if (TREE_CODE (arg1) == REAL_CST)
                {
                  REAL_VALUE_TYPE c;
                  HOST_WIDE_INT n;

                  c = TREE_REAL_CST (arg1);
                  n = real_to_integer (&c);

                  if ((n & 1) == 0)
                    {
                      REAL_VALUE_TYPE cint;
                      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
                      if (real_identical (&c, &cint))
                        return true;
                    }
                }

              break;

            default:
              break;
            }
        }
    }

  return false;
}

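/* E.g. a call pow (x, 2.0) is recognized as nonnegative above: 2.0 is
   an even integer-valued REAL_CST, and x*x is nonnegative for any
   non-NaN x.  */
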
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
          && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (TREE_CODE (min_val) == INTEGER_CST)
            return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
          && tree_fits_uhwi_p (off))
        {
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);
          if (offset / part_widthi
              < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
                                part_width, index);
        }

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
          && useless_type_conversion_p (type,
                                        TREE_TYPE (TREE_TYPE (addrtype))))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
          || DECL_P (TREE_OPERAND (addr, 0)))
        return fold_build2 (MEM_REF, type,
                            addr,
                            build_int_cst_wide (ptype,
                                                TREE_INT_CST_LOW (off),
                                                TREE_INT_CST_HIGH (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
        sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
        return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);