/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2013 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stringpool.h"
#include "expr.h"
#include "stmt.h"
#include "stor-layout.h"
#include "flags.h"
#include "function.h"
#include "dumpfile.h"
#include "bitmap.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "target.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-address.h"
#include "langhooks.h"

/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable that
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred to only if they were not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      snode = symtab_get_node (decl);
      if (!snode)
	return false;
      node = dyn_cast <cgraph_node> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || !DECL_EXTERNAL (from_decl)
      || (flag_ltrans
	  && symtab_get_node (from_decl)->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && (!(snode = symtab_get_node (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exceptions are the COMDAT functions where introducing a direct
     reference implies a need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also when still gimplifying all referred comdat functions will be
     produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are
     output in units where they are used and when the other unit was
     compiled with LTO it is possible that the vtable was kept public while
     the function itself was privatized.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;

  /* OK we are seeing either COMDAT or static variable.  In this case we must
     check that the definition is still around so we can refer to it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have function body and that we didn't take
	 the decision to eliminate offline copy of the function yet.
	 The second is important when devirtualization happens during final
	 compilation stage when making a new reference no longer makes callee
	 to be compiled.  */
      if (!node || !node->definition || node->global.inlined_to)
	{
	  gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
	  return false;
	}
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->definition)
	{
	  gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
	  return false;
	}
    }
  return true;
}

/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it
   into acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_get_create_node (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}

/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}

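/* Illustrative example of the MEM_REF fold-back above: constant
   propagation can turn a load into

     ... = MEM[&a + 0];

   and, provided the volatility and type-compatibility conditions
   checked above hold, the MEM_REF is folded back to the plain
   reference

     ... = a;  */
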
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X
		 X == 1 and X != 0 is X  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));
	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}

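/* Worked instances of the boolean canonicalization above (SSA names are
   illustrative; X_1 and the lhs share the same boolean type):

     D.2 = X_1 == 1;   =>   D.2 = X_1;
     D.2 = X_1 == 0;   =>   D.2 = ~X_1;      (one-bit precision type)
     D.2 = X_1 == 0;   =>   D.2 = X_1 ^ 1;   (wider boolean type)  */
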
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}

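/* For instance, once constants have been propagated into

     if (2 > 1) goto <bb 3>; else goto <bb 4>;

   fold_binary_loc reduces the condition to a true constant and
   gimple_cond_set_condition_from_tree rewrites the statement as

     if (1 != 0) goto <bb 3>; else goto <bb 4>;

   leaving any cfg cleanup to the caller.  */
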
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;
  gimple laststore;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  laststore = NULL;
  for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
    {
      new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  reaching_vuse = gimple_vuse (stmt);
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}

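/* Sketch of the virtual-operand bookkeeping above (SSA names are
   illustrative): if the replaced call had VUSE .MEM_5 and VDEF .MEM_6
   and EXPR gimplifies into two stores, the first store receives
   VUSE .MEM_5 and a fresh VDEF .MEM_7, and the second store receives
   VUSE .MEM_7 and the original VDEF .MEM_6, so no statement outside
   the new sequence needs its virtual operands renamed.  */
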
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      /* We are computing the maximum value (not string length).  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_maxval_strlen (rhs, length, visited, type);
	  }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    tree op2 = gimple_assign_rhs2 (def_stmt);
	    tree op3 = gimple_assign_rhs3 (def_stmt);
	    return get_maxval_strlen (op2, length, visited, type)
		   && get_maxval_strlen (op3, length, visited, type);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }
	}
	return true;

      default:
	return false;
    }
}

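/* Worked example for the PHI case (SSA names illustrative):

     # s_3 = PHI <"foo" (2), "bar" (3)>
     len_4 = strlen (s_3);

   Both PHI arguments have constant string length 3, so with TYPE == 0
   *LENGTH is set to 3 and true is returned for s_3.  */
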
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	      fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2], ignore,
					   DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}

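/* Illustrative fold: with the string length of the source known,

     strcpy (dst_1, "abc");

   can be rewritten by fold_builtin_strcpy into

     memcpy (dst_1, "abc", 4);

   i.e. another call with different arguments, as the comment above the
   function warns.  */
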
/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors).
   EXPECTED_TYPE is the type of the class the virtual method belongs to.  */

tree
gimple_extract_devirt_binfo_from_cst (tree cst, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, binfo;
  bool last_artificial = false;

  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      if (types_same_for_odr (type, expected_type))
	break;
      if (offset < 0)
	return NULL_TREE;

      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL)
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_to_uhwi (DECL_SIZE (fld));
	  if (pos <= offset && (pos + size) > offset)
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      offset -= pos;
    }
  /* Artificial sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;

  return binfo;
}

/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, cgraph_get_node (gimple_call_addr_fndecl
						(OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (virtual_method_call_p (callee))
	{
	  tree obj = OBJ_TYPE_REF_OBJECT (callee);
	  tree binfo = gimple_extract_devirt_binfo_from_cst
		 (obj, obj_type_ref_class (callee));
	  if (binfo)
	    {
	      HOST_WIDE_INT token
		= TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
	      tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
	      if (fndecl)
		{
#ifdef ENABLE_CHECKING
		  gcc_assert (possible_polymorphic_call_target_p
				 (callee, cgraph_get_node (fndecl)));
#endif
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		}
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  callee = gimple_call_fndecl (stmt);
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);
      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
      else if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
	changed |= targetm.gimple_fold_builtin (gsi);
    }

  return changed;
}

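/* Devirtualization sketch (class and names hypothetical): for a global

     struct B b;   /- B has no base classes -/

   a polymorphic call such as

     OBJ_TYPE_REF (_4; &b -> 0) (&b);

   yields B's binfo from &b via gimple_extract_devirt_binfo_from_cst,
   token 0 selects the target method, and the statement becomes the
   direct call

     B::f (&b);  */
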
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}

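/* For example, constant propagation can produce

     *&x = 0;

   which both entry points above reduce to the direct store

     x = 0;

   fold_stmt_inplace doing so without replacing the statement object.  */
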
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;

  if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}

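/* For instance (SSA names illustrative), canonicalize_bool (a_1 < b_2, true)
   yields the boolean-typed comparison a_1 >= b_2, and for a non-boolean
   integer x_3, canonicalize_bool (x_3, false) yields x_3 != 0.  */
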
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}

/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}

/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);

/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}

/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}

/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}

/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}

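/* For example, maybe_fold_and_comparisons (LT_EXPR, x, 3, LT_EXPR, x, 5),
   i.e. x < 3 && x < 5, chooses the more restrictive comparison and
   returns the tree for x < 3, while disjoint ranges such as
   x < 3 && x > 5 fold to boolean_false_node.  */
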
/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}

/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
                          enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_true_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 code2, op2a, op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
         inner1 OR (inner1 AND inner2) => inner1
         !inner1 OR (inner1 OR inner2) => true
         !inner1 OR (inner1 AND inner2) => !inner1 OR inner2  */
      if (inner1 == true_test_var)
        return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_or
                ? boolean_true_node
                : or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_or
                ? boolean_true_node
                : or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
         Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
        {
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             => (t OR inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_or)
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (integer_zerop (t))
                return inner2;
            }

          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner2 OR (op2a code op2b)))  */
          else if (integer_zerop (t))
            return boolean_false_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
        {
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             => (inner1 OR t)
             => (t OR partial)  */
          if (is_or)
            {
              if (integer_zerop (t))
                return inner1;
              else if (integer_onep (t))
                return boolean_true_node;
              /* If both are the same, we can apply the identity
                 (x OR x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner1 OR (op2a code2 op2b)))
             => (t AND partial)  */
          else
            {
              if (integer_zerop (t))
                return boolean_false_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed AND expression.  The
                     interesting case is when at least one is true.
                     Or, if both are the same, we can apply the identity
                     (x AND x) == x.  */
                  if (integer_onep (partial))
                    return t;
                  else if (integer_onep (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
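
/* A worked (hypothetical) instance of the redistribution above:

     inner1_1 = x_5 > 5;
     inner2_2 = x_5 < 10;
     var_3 = inner1_1 & inner2_2;

   Folding var_3 | (x_5 > 3): the first partial result is
   (x_5 > 5) | (x_5 > 3) = x_5 > 3, saved in PARTIAL; the second is
   (x_5 < 10) | (x_5 > 3) = true, so the AND case returns PARTIAL,
   i.e. the whole expression simplifies to x_5 > 3.  */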
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1, code2,
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1,
                                    swap_tree_comparison (code2),
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
         return that or TRUE, depending on whether the constant op1b
         also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code1, boolean_type_node, op1a, op1b);
            }
        }
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
            }
        }

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      else if (code2 == EQ_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Check for singleton ranges.  */
      else if (cmp == 0
               && ((code1 == LT_EXPR && code2 == GT_EXPR)
                   || (code1 == GT_EXPR && code2 == LT_EXPR)))
        return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_true_node;
      else if (cmp <= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ORed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     we're still OK.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
                        {
                          if (!result)
                            result = boolean_true_node;
                          else if (!integer_onep (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME
                           && !SSA_NAME_IS_DEFAULT_DEF (arg))
                    {
                      tree temp;
                      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
                      /* In simple cases we can look through PHI nodes,
                         but we have to be careful with loops.  */
                      if (! dom_info_available_p (CDI_DOMINATORS)
                          || gimple_bb (def_stmt) == gimple_bb (stmt)
                          || dominated_by_p (CDI_DOMINATORS,
                                             gimple_bb (def_stmt),
                                             gimple_bb (stmt)))
                        return NULL_TREE;
                      temp = or_var_with_comparison (arg, invert, code2,
                                                     op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }

        default:
          break;
        }
    }
  return NULL_TREE;
}
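
/* Worked (hypothetical) examples of the constant merging above:

     (x != 5) | (x >= 5)  ->  true     (5 itself satisfies x >= 5)
     (x != 5) | (x > 5)   ->  x != 5   (x > 5 is implied by x != 5)
     (x < 3) | (x > 3)    ->  x != 3   (singleton range check)
     (x < 7) | (x > 3)    ->  true     (the two ranges overlap)  */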
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
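
/* Illustrative use (hypothetical trees): merging two guards such as
   x_1 > 10 and x_1 > 5 could be done with

     tree t = maybe_fold_or_comparisons (GT_EXPR, x_1, ten,
                                         GT_EXPR, x_1, five);

   which yields x_1 > 5.  Trying both argument orders compensates for
   the asymmetry of or_comparisons_1, which only follows the SSA
   definition of its first comparison.  */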
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return (*valueize) (rhs);
                }
              /* Handle propagating invariant addresses into address
                 operations.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR
                       && !is_gimple_min_invariant (rhs))
                {
                  HOST_WIDE_INT offset = 0;
                  tree base;
                  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
                                                          &offset,
                                                          valueize);
                  if (base
                      && (CONSTANT_CLASS_P (base)
                          || decl_address_invariant_p (base)))
                    return build_invariant_address (TREE_TYPE (rhs),
                                                    base, offset);
                }
              else if (TREE_CODE (rhs) == CONSTRUCTOR
                       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                       && (CONSTRUCTOR_NELTS (rhs)
                           == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
                {
                  unsigned i;
                  tree val, *vec;

                  vec = XALLOCAVEC (tree,
                                    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
                    {
                      val = (*valueize) (val);
                      if (TREE_CODE (val) == INTEGER_CST
                          || TREE_CODE (val) == REAL_CST
                          || TREE_CODE (val) == FIXED_CST)
                        vec[i] = val;
                      else
                        return NULL_TREE;
                    }

                  return build_vector (TREE_TYPE (rhs), vec);
                }

              if (kind == tcc_reference)
                {
                  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                       || TREE_CODE (rhs) == REALPART_EXPR
                       || TREE_CODE (rhs) == IMAGPART_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_unary_loc (EXPR_LOCATION (rhs),
                                             TREE_CODE (rhs),
                                             TREE_TYPE (rhs), val);
                    }
                  else if (TREE_CODE (rhs) == BIT_FIELD_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_ternary_loc (EXPR_LOCATION (rhs),
                                               TREE_CODE (rhs),
                                               TREE_TYPE (rhs), val,
                                               TREE_OPERAND (rhs, 1),
                                               TREE_OPERAND (rhs, 2));
                    }
                  else if (TREE_CODE (rhs) == MEM_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      if (TREE_CODE (val) == ADDR_EXPR
                          && is_gimple_min_invariant (val))
                        {
                          tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
                                                  unshare_expr (val),
                                                  TREE_OPERAND (rhs, 1));
                          if (tem)
                            rhs = tem;
                        }
                    }
                  return fold_const_aggregate_ref_1 (rhs, valueize);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            {
              /* Handle unary operators that can appear in GIMPLE form.
                 Note that we know the single operand must be a constant,
                 so this should almost always return a simplified RHS.  */
              tree lhs = gimple_assign_lhs (stmt);
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));

              /* Conversions are useless for CCP purposes if they are
                 value-preserving.  Thus the restrictions that
                 useless_type_conversion_p places for restrict qualification
                 of pointer types should not apply here.
                 Substitution later will only substitute to allowed places.  */
              if (CONVERT_EXPR_CODE_P (subcode)
                  && POINTER_TYPE_P (TREE_TYPE (lhs))
                  && POINTER_TYPE_P (TREE_TYPE (op0))
                  && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
                     == TYPE_ADDR_SPACE (TREE_TYPE (op0))
                  && TYPE_MODE (TREE_TYPE (lhs))
                     == TYPE_MODE (TREE_TYPE (op0)))
                return op0;

              return
                fold_unary_ignore_overflow_loc (loc, subcode,
                                                gimple_expr_type (stmt), op0);
            }

          case GIMPLE_BINARY_RHS:
            {
              /* Handle binary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

              /* Translate &x + CST into an invariant form suitable for
                 further propagation.  */
              if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                  && TREE_CODE (op0) == ADDR_EXPR
                  && TREE_CODE (op1) == INTEGER_CST)
                {
                  tree off = fold_convert (ptr_type_node, op1);
                  return build_fold_addr_expr_loc
                           (loc,
                            fold_build2 (MEM_REF,
                                         TREE_TYPE (TREE_TYPE (op0)),
                                         unshare_expr (op0), off));
                }

              return fold_binary_loc (loc, subcode,
                                      gimple_expr_type (stmt), op0, op1);
            }

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

              /* Fold embedded expressions in ternary codes.  */
              if ((subcode == COND_EXPR
                   || subcode == VEC_COND_EXPR)
                  && COMPARISON_CLASS_P (op0))
                {
                  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
                  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
                  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
                                              TREE_TYPE (op0), op00, op01);
                  if (tem)
                    op0 = tem;
                }

              return fold_ternary_loc (loc, subcode,
                                       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
        tree fn;

        if (gimple_call_internal_p (stmt))
          {
            enum tree_code subcode = ERROR_MARK;
            switch (gimple_call_internal_fn (stmt))
              {
              case IFN_UBSAN_CHECK_ADD:
                subcode = PLUS_EXPR;
                break;
              case IFN_UBSAN_CHECK_SUB:
                subcode = MINUS_EXPR;
                break;
              case IFN_UBSAN_CHECK_MUL:
                subcode = MULT_EXPR;
                break;
              default:
                return NULL_TREE;
              }
            tree op0 = (*valueize) (gimple_call_arg (stmt, 0));
            tree op1 = (*valueize) (gimple_call_arg (stmt, 1));

            if (TREE_CODE (op0) != INTEGER_CST
                || TREE_CODE (op1) != INTEGER_CST)
              return NULL_TREE;
            tree res = fold_binary_loc (loc, subcode,
                                        TREE_TYPE (gimple_call_arg (stmt, 0)),
                                        op0, op1);
            if (res
                && TREE_CODE (res) == INTEGER_CST
                && !TREE_OVERFLOW (res))
              return res;
            return NULL_TREE;
          }

        fn = (*valueize) (gimple_call_fn (stmt));
        if (TREE_CODE (fn) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
          {
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree call, retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              args[i] = (*valueize) (gimple_call_arg (stmt, i));
            call = build_call_array_loc (loc,
                                         gimple_call_return_type (stmt),
                                         fn, gimple_call_num_args (stmt), args);
            retval = fold_call_expr (EXPR_LOCATION (call), call, false);
            if (retval)
              /* fold_call_expr wraps the result inside a NOP_EXPR.  */
              STRIP_NOPS (retval);
            return retval;
          }
        return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
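
/* An illustrative (hypothetical) valueize callback.  Returning the SSA
   name unchanged means "no better value known", so only statements whose
   operands are already constant will fold; a pass such as CCP instead
   returns the constant recorded in its lattice:

     static tree
     valueize_identity (tree name)
     {
       return name;
     }

     ... gimple_fold_stmt_to_constant_1 (stmt, valueize_identity); ...  */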
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
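
/* For example, gimple_fold_stmt_to_constant_1 may return a simplified
   but non-invariant tree (say, the operand of a value-preserving pointer
   conversion); this wrapper filters such results out and only lets
   through values like INTEGER_CSTs or invariant addresses.  */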
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

static tree
fold_ctor_reference (tree type, tree ctor,
                     unsigned HOST_WIDE_INT offset,
                     unsigned HOST_WIDE_INT size, tree);
/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
                      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
        {
          if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
            return NULL_TREE;
          *bit_offset += (mem_ref_offset (base).low
                          * BITS_PER_UNIT);
        }

      if (valueize
          && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
        base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
        tree init = ctor_for_folding (base);

        /* Our semantic is exact opposite of ctor_for_folding;
           NULL means unknown, while error_mark_node is 0.  */
        if (init == error_mark_node)
          return NULL_TREE;
        if (!init)
          return error_mark_node;
        return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
        return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
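
/* For illustration (hypothetical declarations): given

     static const int array[40] = { ... };

   a BASE of MEM_REF <&array, 8> yields the CONSTRUCTOR of array with
   8 * BITS_PER_UNIT added to *BIT_OFFSET, while for

     static const int a;

   error_mark_node is returned, meaning a known all-zero initializer.  */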
/* CTOR is STRING_CST.  Fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.

   We only do the simple job of folding byte accesses.  */

static tree
fold_string_cst_ctor_reference (tree type, tree ctor,
                                unsigned HOST_WIDE_INT offset,
                                unsigned HOST_WIDE_INT size)
{
  if (INTEGRAL_TYPE_P (type)
      && (TYPE_MODE (type)
          == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
          == MODE_INT)
      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
      && size == BITS_PER_UNIT
      && !(offset % BITS_PER_UNIT))
    {
      offset /= BITS_PER_UNIT;
      if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
        return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
                                          [offset]));
      /* Folding
           const char a[20] = "hello";
           return a[10];
         might lead to offset greater than string length.  In this case we
         know value is either initialized to 0 or out of bounds.  Return 0
         in both cases.  */
      return build_zero_cst (type);
    }
  return NULL_TREE;
}
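
/* Illustrative folds (hypothetical, assuming 8-bit chars):

     static const char a[20] = "hello";

     a[1]   ->  'e'  (offset 8, size 8, within the STRING_CST)
     a[10]  ->  0    (past the string but within the object)  */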
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
                           unsigned HOST_WIDE_INT offset,
                           unsigned HOST_WIDE_INT size,
                           tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  double_int low_bound, elt_size;
  double_int index, max_index;
  double_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = double_int_zero;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
              == INTEGER_CST);
  elt_size =
    tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type))))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
                 .udiv (elt_size, TRUNC_DIV_EXPR);
  access_index += low_bound;
  if (index_type)
    access_index = access_index.ext (TYPE_PRECISION (index_type),
                                     TYPE_UNSIGNED (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  index = low_bound - double_int_one;
  if (index_type)
    index = index.ext (TYPE_PRECISION (index_type),
                       TYPE_UNSIGNED (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify range,
         or leave index NULL meaning that it is next index after previous
         one.  */
      if (cfield)
        {
          if (TREE_CODE (cfield) == INTEGER_CST)
            max_index = index = tree_to_double_int (cfield);
          else
            {
              gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
              index = tree_to_double_int (TREE_OPERAND (cfield, 0));
              max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
            }
        }
      else
        {
          index += double_int_one;
          if (index_type)
            index = index.ext (TYPE_PRECISION (index_type),
                               TYPE_UNSIGNED (index_type));
          max_index = index;
        }

      /* Do we have a match?  */
      if (access_index.cmp (index, 1) >= 0
          && access_index.cmp (max_index, 1) <= 0)
        return fold_ctor_reference (type, cval, inner_offset, size,
                                    from_decl);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
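
/* Worked (hypothetical) example: for

     static const int a[] = { 10, 20, 30 };

   a 32-bit read at bit OFFSET 64 on a target with BITS_PER_UNIT == 8
   gives elt_size = 4, access_index = (64 / 8) / 4 = 2 and
   inner_offset = 0, so the constructor element 30 is returned.  */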
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
                              unsigned HOST_WIDE_INT offset,
                              unsigned HOST_WIDE_INT size,
                              tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
                            cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      double_int bitoffset;
      double_int byte_offset_cst = tree_to_double_int (byte_offset);
      double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
      double_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
         but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
                  && TREE_CODE (byte_offset) == INTEGER_CST
                  && (field_size != NULL_TREE
                      ? TREE_CODE (field_size) == INTEGER_CST
                      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = tree_to_double_int (field_offset)
                  + byte_offset_cst * bits_per_unit_cst;
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
        bitoffset_end = bitoffset + tree_to_double_int (field_size);
      else
        bitoffset_end = double_int_zero;

      access_end = double_int::from_uhwi (offset)
                   + double_int::from_uhwi (size);

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
         [BITOFFSET, BITOFFSET_END)?  */
      if (access_end.cmp (bitoffset, 0) > 0
          && (field_size == NULL_TREE
              || double_int::from_uhwi (offset).slt (bitoffset_end)))
        {
          double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
          /* We do have overlap.  Now see if field is large enough to
             cover the access.  Give up for accesses spanning multiple
             fields.  */
          if (access_end.cmp (bitoffset_end, 0) > 0)
            return NULL_TREE;
          if (double_int::from_uhwi (offset).slt (bitoffset))
            return NULL_TREE;
          return fold_ctor_reference (type, cval,
                                      inner_offset.to_uhwi (), size,
                                      from_decl);
        }
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
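
/* Worked (hypothetical) example: for

     static struct { char c; int i; } s = { 'x', 42 };

   with int aligned to 4 bytes, field c covers bits [0, 8) and field i
   bits [32, 64).  A 32-bit read at bit offset 32 overlaps i exactly
   and folds to 42; a 32-bit read at offset 16 starts before i, so
   NULL_TREE is returned.  */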
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  */

static tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
                     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && operand_equal_p (TYPE_SIZE (type),
                          TYPE_SIZE (TREE_TYPE (ctor)), 0))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
        STRIP_NOPS (ret);
      return ret;
    }
  if (TREE_CODE (ctor) == STRING_CST)
    return fold_string_cst_ctor_reference (type, ctor, offset, size);
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
          || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
        return fold_array_ctor_reference (type, ctor, offset, size,
                                          from_decl);
      else
        return fold_nonarray_ctor_reference (type, ctor, offset, size,
                                             from_decl);
    }

  return NULL_TREE;
}
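
/* For illustration (hypothetical, IEEE single precision float): reading

     static const int one = 0x3f800000;

   as a 32-bit float hits the view-convert path above and folds to 1.0f,
   since the sizes match and the initializer is not an aggregate.  */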
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
         Only special case variable offsets.
         FIXME: This code can't handle nested references with variable indexes
         (they will be handled only by iteration of ccp).  Perhaps we can bring
         get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
          && valueize
          && (idx = (*valueize) (TREE_OPERAND (t, 1)))
          && TREE_CODE (idx) == INTEGER_CST)
        {
          tree low_bound, unit_size;
          double_int doffset;

          /* If the resulting bit-offset is constant, track it.  */
          if ((low_bound = array_ref_low_bound (t),
               TREE_CODE (low_bound) == INTEGER_CST)
              && (unit_size = array_ref_element_size (t),
                  tree_fits_uhwi_p (unit_size))
              && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
                            .sext (TYPE_PRECISION (TREE_TYPE (idx))),
                  doffset.fits_shwi ()))
            {
              offset = doffset.to_shwi ();
              offset *= tree_to_uhwi (unit_size);
              offset *= BITS_PER_UNIT;

              base = TREE_OPERAND (t, 0);
              ctor = get_base_constructor (base, &offset, valueize);
              /* Empty constructor.  Always fold to 0.  */
              if (ctor == error_mark_node)
                return build_zero_cst (TREE_TYPE (t));
              /* Out of bound array access.  Value is undefined,
                 but don't fold.  */
              if (offset < 0)
                return NULL_TREE;
              /* We cannot determine ctor.  */
              if (!ctor)
                return NULL_TREE;
              return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
                                          tree_to_uhwi (unit_size)
                                          * BITS_PER_UNIT,
                                          base);
            }
        }
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
        return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
        return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
        return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
        return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
                                  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1_loc (EXPR_LOCATION (t),
                                  TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
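
/* Illustrative (hypothetical) use: with a VALUEIZE that maps i_1 to 2
   and a 4-byte int,

     static const int a[3] = { 10, 20, 30 };
     ... = a[i_1];

   the ARRAY_REF case above computes bit offset (2 - 0) * 4 * 8 = 64
   and folds the reference to 30.  */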
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
{
  unsigned HOST_WIDE_INT offset, size;
  tree v, fn, vtable, init;

  vtable = v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (TREE_CODE (v) == POINTER_PLUS_EXPR)
    {
      offset = tree_to_uhwi (TREE_OPERAND (v, 1)) * BITS_PER_UNIT;
      v = TREE_OPERAND (v, 0);
    }
  else
    offset = 0;

  if (TREE_CODE (v) != ADDR_EXPR)
    return NULL_TREE;
  v = TREE_OPERAND (v, 0);

  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    return NULL_TREE;
  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors,
     and we should always assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that the ctor seems unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset += token * size;
  fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
                            offset, size, v);
  if (!fn || integer_zerop (fn))
    return NULL_TREE;
  gcc_assert (TREE_CODE (fn) == ADDR_EXPR
              || TREE_CODE (fn) == FDESC_EXPR);
  fn = TREE_OPERAND (fn, 0);
  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);

  /* When cgraph node is missing and function is not public, we cannot
     devirtualize.  This can happen in WHOPR when the actual method
     ends up in other partition, because we found devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (fn, vtable))
    return NULL_TREE;

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_get_create_node (fn);

  return fn;
}
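
/* Illustrative (hypothetical) layout: if BINFO_VTABLE is &_ZTV1A + 16
   and vtable slots are 8 bytes, then for TOKEN 2 the code computes
   offset = 16 * 8 + 2 * 64 = 256 bits into the vtable array, i.e. its
   fifth slot, whose ADDR_EXPR entry is stripped down to the
   FUNCTION_DECL it references.  */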
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
         cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
        {
        case ABS_EXPR:
          /* Always true for floating-point operands.  */
          return true;

        case MULT_EXPR:
          /* True if the two operands are identical (since we are
             restricted to floating-point inputs).  */
          op0 = gimple_assign_rhs1 (def_stmt);
          op1 = gimple_assign_rhs2 (def_stmt);

          if (op0 == op1
              || operand_equal_p (op0, op1, 0))
            return true;

        default:
          break;
        }
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
          && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
        {
          tree arg1;

          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_ACOS):
            CASE_FLT_FN (BUILT_IN_ACOSH):
            CASE_FLT_FN (BUILT_IN_CABS):
            CASE_FLT_FN (BUILT_IN_COSH):
            CASE_FLT_FN (BUILT_IN_ERFC):
            CASE_FLT_FN (BUILT_IN_EXP):
            CASE_FLT_FN (BUILT_IN_EXP10):
            CASE_FLT_FN (BUILT_IN_EXP2):
            CASE_FLT_FN (BUILT_IN_FABS):
            CASE_FLT_FN (BUILT_IN_FDIM):
            CASE_FLT_FN (BUILT_IN_HYPOT):
            CASE_FLT_FN (BUILT_IN_POW10):
              return true;

            CASE_FLT_FN (BUILT_IN_SQRT):
              /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
                 nonnegative inputs.  */
              if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
                return true;

              break;

            CASE_FLT_FN (BUILT_IN_POWI):
              /* True if the second argument is an even integer.  */
              arg1 = gimple_call_arg (def_stmt, 1);

              if (TREE_CODE (arg1) == INTEGER_CST
                  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
                return true;

              break;

            CASE_FLT_FN (BUILT_IN_POW):
              /* True if the second argument is an even integer-valued
                 real.  */
              arg1 = gimple_call_arg (def_stmt, 1);

              if (TREE_CODE (arg1) == REAL_CST)
                {
                  REAL_VALUE_TYPE c;
                  HOST_WIDE_INT n;

                  c = TREE_REAL_CST (arg1);
                  n = real_to_integer (&c);

                  if ((n & 1) == 0)
                    {
                      REAL_VALUE_TYPE cint;
                      real_from_integer (&cint, VOIDmode, n,
                                         n < 0 ? -1 : 0, 0);
                      if (real_identical (&c, &cint))
                        return true;
                    }
                }

              break;

            default:
              break;
            }
        }
    }

  return false;
}
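
/* Illustrative (hypothetical) GIMPLE:

     t_1 = x_2 * x_2;                  -> true  (operands identical)
     t_3 = __builtin_pow (x_4, 2.0);   -> true  (even integral exponent)
     t_5 = __builtin_pow (x_4, 2.5);   -> false (exponent not integral)  */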
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
          && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (TREE_CODE (min_val) == INTEGER_CST)
            return build4 (ARRAY_REF, type, op, min_val,
                           NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
          && useless_type_conversion_p (type,
                                        TREE_TYPE (TREE_TYPE (addrtype)))
          && tree_fits_uhwi_p (off))
        {
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);
          if (offset / part_widthi
              < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
                                part_width, index);
        }

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
          && useless_type_conversion_p (type,
                                        TREE_TYPE (TREE_TYPE (addrtype))))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
          || DECL_P (TREE_OPERAND (addr, 0)))
        return fold_build2 (MEM_REF, type,
                            addr,
                            build_int_cst_wide (ptype,
                                                TREE_INT_CST_LOW (off),
                                                TREE_INT_CST_HIGH (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
        sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
        return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}