1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2014 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
26 #include "stringpool.h"
29 #include "stor-layout.h"
34 #include "basic-block.h"
35 #include "tree-ssa-alias.h"
36 #include "internal-fn.h"
37 #include "gimple-fold.h"
38 #include "gimple-expr.h"
42 #include "gimple-iterator.h"
43 #include "gimple-ssa.h"
44 #include "tree-ssanames.h"
45 #include "tree-into-ssa.h"
48 #include "tree-ssa-propagate.h"
50 #include "ipa-utils.h"
51 #include "gimple-pretty-print.h"
52 #include "tree-ssa-address.h"
53 #include "langhooks.h"
54 #include "gimplify-me.h"
/* NOTE(review): This chunk is a damaged extraction of GCC's gimple-fold.c.
   Original source lines (braces, returns, the function's return type) are
   missing and statements are split mid-token; the fragment is preserved
   byte-for-byte below.  TODO: restore from the upstream gimple-fold.c
   before making any code change.
   Purpose (per the surviving doc comment): decide whether DECL may be
   referenced from the current translation unit, consulting the symbol
   table / cgraph for COMDAT, WHOPR-partitioned, and optimized-out
   static symbols.  */
56 /* Return true when DECL can be referenced from current unit.
57 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
58 We can get declarations that are not possible to reference for various
61 1) When analyzing C++ virtual tables.
62 C++ virtual tables do have known constructors even
63 when they are keyed to other compilation unit.
64 Those tables can contain pointers to methods and vars
65 in other units. Those methods have both STATIC and EXTERNAL
67 2) In WHOPR mode devirtualization might lead to reference
68 to method that was partitioned elsehwere.
69 In this case we have static VAR_DECL or FUNCTION_DECL
70 that has no corresponding callgraph/varpool node
72 3) COMDAT functions referred by external vtables that
73 we devirtualize only during final compilation stage.
74 At this time we already decided that we will not output
75 the function body and thus we can't reference the symbol
79 can_refer_decl_in_current_unit_p (tree decl
, tree from_decl
)
82 struct cgraph_node
*node
;
85 if (DECL_ABSTRACT (decl
))
88 /* We are concerned only about static/external vars and functions. */
89 if ((!TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
))
90 || (TREE_CODE (decl
) != VAR_DECL
&& TREE_CODE (decl
) != FUNCTION_DECL
))
93 /* Static objects can be referred only if they was not optimized out yet. */
94 if (!TREE_PUBLIC (decl
) && !DECL_EXTERNAL (decl
))
96 snode
= symtab_get_node (decl
);
99 node
= dyn_cast
<cgraph_node
> (snode
);
100 return !node
|| !node
->global
.inlined_to
;
103 /* We will later output the initializer, so we can refer to it.
104 So we are concerned only when DECL comes from initializer of
107 || TREE_CODE (from_decl
) != VAR_DECL
108 || !DECL_EXTERNAL (from_decl
)
110 && symtab_get_node (from_decl
)->in_other_partition
))
112 /* We are folding reference from external vtable. The vtable may reffer
113 to a symbol keyed to other compilation unit. The other compilation
114 unit may be in separate DSO and the symbol may be hidden. */
115 if (DECL_VISIBILITY_SPECIFIED (decl
)
116 && DECL_EXTERNAL (decl
)
117 && (!(snode
= symtab_get_node (decl
)) || !snode
->in_other_partition
))
119 /* When function is public, we always can introduce new reference.
120 Exception are the COMDAT functions where introducing a direct
121 reference imply need to include function body in the curren tunit. */
122 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
124 /* We are not at ltrans stage; so don't worry about WHOPR.
125 Also when still gimplifying all referred comdat functions will be
128 As observed in PR20991 for already optimized out comdat virtual functions
129 it may be tempting to not necessarily give up because the copy will be
130 output elsewhere when corresponding vtable is output.
131 This is however not possible - ABI specify that COMDATs are output in
132 units where they are used and when the other unit was compiled with LTO
133 it is possible that vtable was kept public while the function itself
135 if (!flag_ltrans
&& (!DECL_COMDAT (decl
) || !cgraph_function_flags_ready
))
138 /* OK we are seeing either COMDAT or static variable. In this case we must
139 check that the definition is still around so we can refer it. */
140 if (TREE_CODE (decl
) == FUNCTION_DECL
)
142 node
= cgraph_get_node (decl
);
143 /* Check that we still have function body and that we didn't took
144 the decision to eliminate offline copy of the function yet.
145 The second is important when devirtualization happens during final
146 compilation stage when making a new reference no longer makes callee
148 if (!node
|| !node
->definition
|| node
->global
.inlined_to
)
150 gcc_checking_assert (!TREE_ASM_WRITTEN (decl
));
154 else if (TREE_CODE (decl
) == VAR_DECL
)
156 vnode
= varpool_get_node (decl
);
157 if (!vnode
|| !vnode
->definition
)
159 gcc_checking_assert (!TREE_ASM_WRITTEN (decl
));
/* NOTE(review): damaged extraction — interior lines of this function are
   missing and statements are split mid-token; preserved byte-for-byte.
   TODO: restore from upstream gimple-fold.c before editing.
   Purpose (per the surviving doc comment): canonicalize CVAL, a value
   taken from a variable's DECL_INITIAL, into a form acceptable to
   is_gimple_min_invariant — folding POINTER_PLUS_EXPR into an ADDR_EXPR
   of a MEM_REF, resolving COMPOUND_LITERAL_EXPR bases, and fixing up
   types in global initializers.  */
166 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
167 acceptable form for is_gimple_min_invariant.
168 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
171 canonicalize_constructor_val (tree cval
, tree from_decl
)
173 tree orig_cval
= cval
;
175 if (TREE_CODE (cval
) == POINTER_PLUS_EXPR
176 && TREE_CODE (TREE_OPERAND (cval
, 1)) == INTEGER_CST
)
178 tree ptr
= TREE_OPERAND (cval
, 0);
179 if (is_gimple_min_invariant (ptr
))
180 cval
= build1_loc (EXPR_LOCATION (cval
),
181 ADDR_EXPR
, TREE_TYPE (ptr
),
182 fold_build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (ptr
)),
184 fold_convert (ptr_type_node
,
185 TREE_OPERAND (cval
, 1))));
187 if (TREE_CODE (cval
) == ADDR_EXPR
)
189 tree base
= NULL_TREE
;
190 if (TREE_CODE (TREE_OPERAND (cval
, 0)) == COMPOUND_LITERAL_EXPR
)
192 base
= COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval
, 0));
194 TREE_OPERAND (cval
, 0) = base
;
197 base
= get_base_address (TREE_OPERAND (cval
, 0));
201 if ((TREE_CODE (base
) == VAR_DECL
202 || TREE_CODE (base
) == FUNCTION_DECL
)
203 && !can_refer_decl_in_current_unit_p (base
, from_decl
))
205 if (TREE_CODE (base
) == VAR_DECL
)
206 TREE_ADDRESSABLE (base
) = 1;
207 else if (TREE_CODE (base
) == FUNCTION_DECL
)
209 /* Make sure we create a cgraph node for functions we'll reference.
210 They can be non-existent if the reference comes from an entry
211 of an external vtable for example. */
212 cgraph_get_create_node (base
);
214 /* Fixup types in global initializers. */
215 if (TREE_TYPE (TREE_TYPE (cval
)) != TREE_TYPE (TREE_OPERAND (cval
, 0)))
216 cval
= build_fold_addr_expr (TREE_OPERAND (cval
, 0));
218 if (!useless_type_conversion_p (TREE_TYPE (orig_cval
), TREE_TYPE (cval
)))
219 cval
= fold_convert (TREE_TYPE (orig_cval
), cval
);
222 if (TREE_OVERFLOW_P (cval
))
223 return drop_tree_overflow (cval
);
/* NOTE(review): damaged extraction — interior lines missing, statements
   split mid-token; preserved byte-for-byte.  TODO: restore from upstream
   gimple-fold.c before editing.
   Purpose (per the surviving doc comment): return the known constant
   value of variable SYM via ctor_for_folding/canonicalize_constructor_val,
   falling back to a zero constant for integral/float 'const' variables
   without an initializer; NULL_TREE otherwise.  */
227 /* If SYM is a constant variable with known value, return the value.
228 NULL_TREE is returned otherwise. */
231 get_symbol_constant_value (tree sym
)
233 tree val
= ctor_for_folding (sym
);
234 if (val
!= error_mark_node
)
238 val
= canonicalize_constructor_val (unshare_expr (val
), sym
);
239 if (val
&& is_gimple_min_invariant (val
))
244 /* Variables declared 'const' without an initializer
245 have zero as the initializer if they may not be
246 overridden at link or run time. */
248 && (INTEGRAL_TYPE_P (TREE_TYPE (sym
))
249 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym
))))
250 return build_zero_cst (TREE_TYPE (sym
));
/* NOTE(review): damaged extraction — interior lines (including the
   declaration of `t` and several returns) are missing; preserved
   byte-for-byte.  TODO: restore from upstream gimple-fold.c before
   editing.
   Purpose (per the surviving doc comment): simplify the memory
   reference tree EXPR after constant-address propagation — folding
   constant VIEW_CONVERT/REALPART/IMAGPART/BIT_FIELD_REF, canonicalizing
   MEM_REF address operands, folding MEM_REFs back to reference trees,
   and handling TARGET_MEM_REF via maybe_fold_tmr; recurses on itself
   after each rewrite.  */
258 /* Subroutine of fold_stmt. We perform several simplifications of the
259 memory reference tree EXPR and make sure to re-gimplify them properly
260 after propagation of constant addresses. IS_LHS is true if the
261 reference is supposed to be an lvalue. */
264 maybe_fold_reference (tree expr
, bool is_lhs
)
269 if ((TREE_CODE (expr
) == VIEW_CONVERT_EXPR
270 || TREE_CODE (expr
) == REALPART_EXPR
271 || TREE_CODE (expr
) == IMAGPART_EXPR
)
272 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
273 return fold_unary_loc (EXPR_LOCATION (expr
),
276 TREE_OPERAND (expr
, 0));
277 else if (TREE_CODE (expr
) == BIT_FIELD_REF
278 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
279 return fold_ternary_loc (EXPR_LOCATION (expr
),
282 TREE_OPERAND (expr
, 0),
283 TREE_OPERAND (expr
, 1),
284 TREE_OPERAND (expr
, 2));
286 while (handled_component_p (*t
))
287 t
= &TREE_OPERAND (*t
, 0);
289 /* Canonicalize MEM_REFs invariant address operand. Do this first
290 to avoid feeding non-canonical MEM_REFs elsewhere. */
291 if (TREE_CODE (*t
) == MEM_REF
292 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t
, 0)))
294 bool volatile_p
= TREE_THIS_VOLATILE (*t
);
295 tree tem
= fold_binary (MEM_REF
, TREE_TYPE (*t
),
296 TREE_OPERAND (*t
, 0),
297 TREE_OPERAND (*t
, 1));
300 TREE_THIS_VOLATILE (tem
) = volatile_p
;
302 tem
= maybe_fold_reference (expr
, is_lhs
);
310 && (result
= fold_const_aggregate_ref (expr
))
311 && is_gimple_min_invariant (result
))
314 /* Fold back MEM_REFs to reference trees. */
315 if (TREE_CODE (*t
) == MEM_REF
316 && TREE_CODE (TREE_OPERAND (*t
, 0)) == ADDR_EXPR
317 && integer_zerop (TREE_OPERAND (*t
, 1))
318 && (TREE_THIS_VOLATILE (*t
)
319 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0)))
320 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t
, 1)))
321 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t
))
322 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t
, 1)))))
323 /* We have to look out here to not drop a required conversion
324 from the rhs to the lhs if is_lhs, but we don't have the
325 rhs here to verify that. Thus require strict type
327 && types_compatible_p (TREE_TYPE (*t
),
328 TREE_TYPE (TREE_OPERAND
329 (TREE_OPERAND (*t
, 0), 0))))
332 *t
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
333 tem
= maybe_fold_reference (expr
, is_lhs
);
338 else if (TREE_CODE (*t
) == TARGET_MEM_REF
)
340 tree tem
= maybe_fold_tmr (*t
);
344 tem
= maybe_fold_reference (expr
, is_lhs
);
/* NOTE(review): damaged extraction — interior lines (braces, breaks,
   returns, several declarations) are missing; preserved byte-for-byte.
   TODO: restore from upstream gimple-fold.c before editing.
   Purpose (per the surviving doc comment): attempt to fold the
   assignment at iterator SI and return a replacement RHS or NULL_TREE.
   Dispatches on get_gimple_rhs_class: single RHS (references,
   OBJ_TYPE_REF devirtualization, ADDR_EXPR, vector CONSTRUCTOR,
   DECL constants), unary (fold_unary_loc with overflow-flag care on
   conversions), binary (boolean ==/!= 0/1 canonicalization, then
   fold_binary_loc), and ternary (COND_EXPR condition folding, then
   fold_ternary_loc).  */
355 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
356 replacement rhs for the statement or NULL_TREE if no simplification
357 could be made. It is assumed that the operands have been previously
361 fold_gimple_assign (gimple_stmt_iterator
*si
)
363 gimple stmt
= gsi_stmt (*si
);
364 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
365 location_t loc
= gimple_location (stmt
);
367 tree result
= NULL_TREE
;
369 switch (get_gimple_rhs_class (subcode
))
371 case GIMPLE_SINGLE_RHS
:
373 tree rhs
= gimple_assign_rhs1 (stmt
);
375 if (REFERENCE_CLASS_P (rhs
))
376 return maybe_fold_reference (rhs
, false);
378 else if (TREE_CODE (rhs
) == OBJ_TYPE_REF
)
380 tree val
= OBJ_TYPE_REF_EXPR (rhs
);
381 if (is_gimple_min_invariant (val
))
383 else if (flag_devirtualize
&& virtual_method_call_p (val
))
386 vec
<cgraph_node
*>targets
387 = possible_polymorphic_call_targets (val
, &final
);
388 if (final
&& targets
.length () <= 1)
391 if (targets
.length () == 1)
392 fndecl
= targets
[0]->decl
;
394 fndecl
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
395 val
= fold_convert (TREE_TYPE (val
), fndecl
);
396 STRIP_USELESS_TYPE_CONVERSION (val
);
402 else if (TREE_CODE (rhs
) == ADDR_EXPR
)
404 tree ref
= TREE_OPERAND (rhs
, 0);
405 tree tem
= maybe_fold_reference (ref
, true);
407 && TREE_CODE (tem
) == MEM_REF
408 && integer_zerop (TREE_OPERAND (tem
, 1)))
409 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (tem
, 0));
411 result
= fold_convert (TREE_TYPE (rhs
),
412 build_fold_addr_expr_loc (loc
, tem
));
413 else if (TREE_CODE (ref
) == MEM_REF
414 && integer_zerop (TREE_OPERAND (ref
, 1)))
415 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (ref
, 0));
418 else if (TREE_CODE (rhs
) == CONSTRUCTOR
419 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
420 && (CONSTRUCTOR_NELTS (rhs
)
421 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
423 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
427 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
428 if (TREE_CODE (val
) != INTEGER_CST
429 && TREE_CODE (val
) != REAL_CST
430 && TREE_CODE (val
) != FIXED_CST
)
433 return build_vector_from_ctor (TREE_TYPE (rhs
),
434 CONSTRUCTOR_ELTS (rhs
));
437 else if (DECL_P (rhs
))
438 return get_symbol_constant_value (rhs
);
440 /* If we couldn't fold the RHS, hand over to the generic
442 if (result
== NULL_TREE
)
445 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
446 that may have been added by fold, and "useless" type
447 conversions that might now be apparent due to propagation. */
448 STRIP_USELESS_TYPE_CONVERSION (result
);
450 if (result
!= rhs
&& valid_gimple_rhs_p (result
))
457 case GIMPLE_UNARY_RHS
:
459 tree rhs
= gimple_assign_rhs1 (stmt
);
461 result
= fold_unary_loc (loc
, subcode
, gimple_expr_type (stmt
), rhs
);
464 /* If the operation was a conversion do _not_ mark a
465 resulting constant with TREE_OVERFLOW if the original
466 constant was not. These conversions have implementation
467 defined behavior and retaining the TREE_OVERFLOW flag
468 here would confuse later passes such as VRP. */
469 if (CONVERT_EXPR_CODE_P (subcode
)
470 && TREE_CODE (result
) == INTEGER_CST
471 && TREE_CODE (rhs
) == INTEGER_CST
)
472 TREE_OVERFLOW (result
) = TREE_OVERFLOW (rhs
);
474 STRIP_USELESS_TYPE_CONVERSION (result
);
475 if (valid_gimple_rhs_p (result
))
481 case GIMPLE_BINARY_RHS
:
482 /* Try to canonicalize for boolean-typed X the comparisons
483 X == 0, X == 1, X != 0, and X != 1. */
484 if (gimple_assign_rhs_code (stmt
) == EQ_EXPR
485 || gimple_assign_rhs_code (stmt
) == NE_EXPR
)
487 tree lhs
= gimple_assign_lhs (stmt
);
488 tree op1
= gimple_assign_rhs1 (stmt
);
489 tree op2
= gimple_assign_rhs2 (stmt
);
490 tree type
= TREE_TYPE (op1
);
492 /* Check whether the comparison operands are of the same boolean
493 type as the result type is.
494 Check that second operand is an integer-constant with value
496 if (TREE_CODE (op2
) == INTEGER_CST
497 && (integer_zerop (op2
) || integer_onep (op2
))
498 && useless_type_conversion_p (TREE_TYPE (lhs
), type
))
500 enum tree_code cmp_code
= gimple_assign_rhs_code (stmt
);
501 bool is_logical_not
= false;
503 /* X == 0 and X != 1 is a logical-not.of X
504 X == 1 and X != 0 is X */
505 if ((cmp_code
== EQ_EXPR
&& integer_zerop (op2
))
506 || (cmp_code
== NE_EXPR
&& integer_onep (op2
)))
507 is_logical_not
= true;
509 if (is_logical_not
== false)
511 /* Only for one-bit precision typed X the transformation
512 !X -> ~X is valied. */
513 else if (TYPE_PRECISION (type
) == 1)
514 result
= build1_loc (gimple_location (stmt
), BIT_NOT_EXPR
,
516 /* Otherwise we use !X -> X ^ 1. */
518 result
= build2_loc (gimple_location (stmt
), BIT_XOR_EXPR
,
519 type
, op1
, build_int_cst (type
, 1));
525 result
= fold_binary_loc (loc
, subcode
,
526 TREE_TYPE (gimple_assign_lhs (stmt
)),
527 gimple_assign_rhs1 (stmt
),
528 gimple_assign_rhs2 (stmt
));
532 STRIP_USELESS_TYPE_CONVERSION (result
);
533 if (valid_gimple_rhs_p (result
))
538 case GIMPLE_TERNARY_RHS
:
539 /* Try to fold a conditional expression. */
540 if (gimple_assign_rhs_code (stmt
) == COND_EXPR
)
542 tree op0
= gimple_assign_rhs1 (stmt
);
545 location_t cond_loc
= gimple_location (stmt
);
547 if (COMPARISON_CLASS_P (op0
))
549 fold_defer_overflow_warnings ();
550 tem
= fold_binary_loc (cond_loc
,
551 TREE_CODE (op0
), TREE_TYPE (op0
),
552 TREE_OPERAND (op0
, 0),
553 TREE_OPERAND (op0
, 1));
554 /* This is actually a conditional expression, not a GIMPLE
555 conditional statement, however, the valid_gimple_rhs_p
556 test still applies. */
557 set
= (tem
&& is_gimple_condexpr (tem
)
558 && valid_gimple_rhs_p (tem
));
559 fold_undefer_overflow_warnings (set
, stmt
, 0);
561 else if (is_gimple_min_invariant (op0
))
570 result
= fold_build3_loc (cond_loc
, COND_EXPR
,
571 TREE_TYPE (gimple_assign_lhs (stmt
)), tem
,
572 gimple_assign_rhs2 (stmt
),
573 gimple_assign_rhs3 (stmt
));
577 result
= fold_ternary_loc (loc
, subcode
,
578 TREE_TYPE (gimple_assign_lhs (stmt
)),
579 gimple_assign_rhs1 (stmt
),
580 gimple_assign_rhs2 (stmt
),
581 gimple_assign_rhs3 (stmt
));
585 STRIP_USELESS_TYPE_CONVERSION (result
);
586 if (valid_gimple_rhs_p (result
))
591 case GIMPLE_INVALID_RHS
:
/* NOTE(review): damaged extraction — interior lines (braces, return
   statements) are missing; preserved byte-for-byte.  TODO: restore
   from upstream gimple-fold.c before editing.
   Purpose (per the surviving doc comment): fold the condition of a
   GIMPLE_COND via fold_binary_loc and, when the result is a valid
   gimple condexpr, store it back with
   gimple_cond_set_condition_from_tree.  */
598 /* Attempt to fold a conditional statement. Return true if any changes were
599 made. We only attempt to fold the condition expression, and do not perform
600 any transformation that would require alteration of the cfg. It is
601 assumed that the operands have been previously folded. */
604 fold_gimple_cond (gimple stmt
)
606 tree result
= fold_binary_loc (gimple_location (stmt
),
607 gimple_cond_code (stmt
),
609 gimple_cond_lhs (stmt
),
610 gimple_cond_rhs (stmt
));
614 STRIP_USELESS_TYPE_CONVERSION (result
);
615 if (is_gimple_condexpr (result
) && valid_gimple_rhs_p (result
))
617 gimple_cond_set_condition_from_tree (stmt
, result
);
/* NOTE(review): damaged extraction — interior lines (braces, several
   declarations such as `lhs`, `vdef`, `laststore`, `reaching_vuse`)
   are missing; preserved byte-for-byte.  TODO: restore from upstream
   gimple-fold.c before editing.
   Purpose (per the surviving doc comment): gimplify EXPR into a
   statement sequence replacing the GIMPLE_CALL at *SI_P, wiring the
   virtual operand (VUSE/VDEF) chain of the new sequence to match the
   replaced statement — a backward pass assigns VDEFs to stores, a
   forward pass threads the reaching VUSE.  */
625 /* Convert EXPR into a GIMPLE value suitable for substitution on the
626 RHS of an assignment. Insert the necessary statements before
627 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
628 is replaced. If the call is expected to produces a result, then it
629 is replaced by an assignment of the new RHS to the result variable.
630 If the result is to be ignored, then the call is replaced by a
631 GIMPLE_NOP. A proper VDEF chain is retained by making the first
632 VUSE and the last VDEF of the whole sequence be the same as the replaced
633 statement and using new SSA names for stores in between. */
636 gimplify_and_update_call_from_tree (gimple_stmt_iterator
*si_p
, tree expr
)
639 gimple stmt
, new_stmt
;
640 gimple_stmt_iterator i
;
641 gimple_seq stmts
= NULL
;
645 stmt
= gsi_stmt (*si_p
);
647 gcc_assert (is_gimple_call (stmt
));
649 push_gimplify_context (gimple_in_ssa_p (cfun
));
651 lhs
= gimple_call_lhs (stmt
);
652 if (lhs
== NULL_TREE
)
654 gimplify_and_add (expr
, &stmts
);
655 /* We can end up with folding a memcpy of an empty class assignment
656 which gets optimized away by C++ gimplification. */
657 if (gimple_seq_empty_p (stmts
))
659 pop_gimplify_context (NULL
);
660 if (gimple_in_ssa_p (cfun
))
662 unlink_stmt_vdef (stmt
);
665 gsi_replace (si_p
, gimple_build_nop (), true);
671 tree tmp
= get_initialized_tmp_var (expr
, &stmts
, NULL
);
672 new_stmt
= gimple_build_assign (lhs
, tmp
);
673 i
= gsi_last (stmts
);
674 gsi_insert_after_without_update (&i
, new_stmt
,
675 GSI_CONTINUE_LINKING
);
678 pop_gimplify_context (NULL
);
680 if (gimple_has_location (stmt
))
681 annotate_all_with_location (stmts
, gimple_location (stmt
));
683 /* First iterate over the replacement statements backward, assigning
684 virtual operands to their defining statements. */
686 for (i
= gsi_last (stmts
); !gsi_end_p (i
); gsi_prev (&i
))
688 new_stmt
= gsi_stmt (i
);
689 if ((gimple_assign_single_p (new_stmt
)
690 && !is_gimple_reg (gimple_assign_lhs (new_stmt
)))
691 || (is_gimple_call (new_stmt
)
692 && (gimple_call_flags (new_stmt
)
693 & (ECF_NOVOPS
| ECF_PURE
| ECF_CONST
| ECF_NORETURN
)) == 0))
697 vdef
= gimple_vdef (stmt
);
699 vdef
= make_ssa_name (gimple_vop (cfun
), new_stmt
);
700 gimple_set_vdef (new_stmt
, vdef
);
701 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
702 SSA_NAME_DEF_STMT (vdef
) = new_stmt
;
703 laststore
= new_stmt
;
707 /* Second iterate over the statements forward, assigning virtual
708 operands to their uses. */
709 reaching_vuse
= gimple_vuse (stmt
);
710 for (i
= gsi_start (stmts
); !gsi_end_p (i
); gsi_next (&i
))
712 new_stmt
= gsi_stmt (i
);
713 /* If the new statement possibly has a VUSE, update it with exact SSA
714 name we know will reach this one. */
715 if (gimple_has_mem_ops (new_stmt
))
716 gimple_set_vuse (new_stmt
, reaching_vuse
);
717 gimple_set_modified (new_stmt
, true);
718 if (gimple_vdef (new_stmt
))
719 reaching_vuse
= gimple_vdef (new_stmt
);
722 /* If the new sequence does not do a store release the virtual
723 definition of the original statement. */
725 && reaching_vuse
== gimple_vuse (stmt
))
727 tree vdef
= gimple_vdef (stmt
);
729 && TREE_CODE (vdef
) == SSA_NAME
)
731 unlink_stmt_vdef (stmt
);
732 release_ssa_name (vdef
);
736 /* Finally replace the original statement with the sequence. */
737 gsi_replace_with_seq (si_p
, stmts
, false);
/* NOTE(review): damaged extraction — interior lines (braces, returns,
   declarations such as `var` and `def_stmt`, case labels of the switch)
   are missing; preserved byte-for-byte.  TODO: restore from upstream
   gimple-fold.c before editing.
   Purpose (per the surviving doc comment): determine the string length,
   maximum string length, or maximum value of ARG (TYPE selects which),
   following SSA use-def chains through copies, COND_EXPRs, and PHI
   nodes; VISITED breaks cycles; returns false when undeterminable.  */
740 /* Return the string length, maximum string length or maximum value of
742 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
743 is not NULL and, for TYPE == 0, its value is not equal to the length
744 we determine or if we are unable to determine the length or value,
745 return false. VISITED is a bitmap of visited variables.
746 TYPE is 0 if string length should be returned, 1 for maximum string
747 length and 2 for maximum value ARG can have. */
750 get_maxval_strlen (tree arg
, tree
*length
, bitmap visited
, int type
)
755 if (TREE_CODE (arg
) != SSA_NAME
)
757 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
758 if (TREE_CODE (arg
) == ADDR_EXPR
759 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ARRAY_REF
760 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg
, 0), 1)))
762 tree aop0
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
763 if (TREE_CODE (aop0
) == INDIRECT_REF
764 && TREE_CODE (TREE_OPERAND (aop0
, 0)) == SSA_NAME
)
765 return get_maxval_strlen (TREE_OPERAND (aop0
, 0),
766 length
, visited
, type
);
772 if (TREE_CODE (val
) != INTEGER_CST
773 || tree_int_cst_sgn (val
) < 0)
777 val
= c_strlen (arg
, 1);
785 if (TREE_CODE (*length
) != INTEGER_CST
786 || TREE_CODE (val
) != INTEGER_CST
)
789 if (tree_int_cst_lt (*length
, val
))
793 else if (simple_cst_equal (val
, *length
) != 1)
801 /* If ARG is registered for SSA update we cannot look at its defining
803 if (name_registered_for_update_p (arg
))
806 /* If we were already here, break the infinite cycle. */
807 if (!bitmap_set_bit (visited
, SSA_NAME_VERSION (arg
)))
811 def_stmt
= SSA_NAME_DEF_STMT (var
);
813 switch (gimple_code (def_stmt
))
816 /* The RHS of the statement defining VAR must either have a
817 constant length or come from another SSA_NAME with a constant
819 if (gimple_assign_single_p (def_stmt
)
820 || gimple_assign_unary_nop_p (def_stmt
))
822 tree rhs
= gimple_assign_rhs1 (def_stmt
);
823 return get_maxval_strlen (rhs
, length
, visited
, type
);
825 else if (gimple_assign_rhs_code (def_stmt
) == COND_EXPR
)
827 tree op2
= gimple_assign_rhs2 (def_stmt
);
828 tree op3
= gimple_assign_rhs3 (def_stmt
);
829 return get_maxval_strlen (op2
, length
, visited
, type
)
830 && get_maxval_strlen (op3
, length
, visited
, type
);
836 /* All the arguments of the PHI node must have the same constant
840 for (i
= 0; i
< gimple_phi_num_args (def_stmt
); i
++)
842 tree arg
= gimple_phi_arg (def_stmt
, i
)->def
;
844 /* If this PHI has itself as an argument, we cannot
845 determine the string length of this argument. However,
846 if we can find a constant string length for the other
847 PHI args then we can still be sure that this is a
848 constant string length. So be optimistic and just
849 continue with the next argument. */
850 if (arg
== gimple_phi_result (def_stmt
))
853 if (!get_maxval_strlen (arg
, length
, visited
, type
))
/* NOTE(review): damaged extraction — interior lines (braces, returns,
   declarations such as `result`, `val[]`, `arg_idx`, `type`, `new_val`,
   and several case/default labels) are missing; preserved byte-for-byte.
   TODO: restore from upstream gimple-fold.c before editing.
   Purpose (per the surviving doc comment): fold a builtin call STMT.
   Tries the generic fold_call_stmt first, then uses string-length
   dataflow (get_maxval_strlen) to simplify strlen/strcpy/strncpy/
   strcat/fputs and the _chk variants of memory/string/printf builtins.  */
865 /* Fold builtin call in statement STMT. Returns a simplified tree.
866 We may return a non-constant expression, including another call
867 to a different function and with different arguments, e.g.,
868 substituting memcpy for strcpy when the string length is known.
869 Note that some builtins expand into inline code that may not
870 be valid in GIMPLE. Callers must take care. */
873 gimple_fold_builtin (gimple stmt
)
881 location_t loc
= gimple_location (stmt
);
883 ignore
= (gimple_call_lhs (stmt
) == NULL
);
885 /* First try the generic builtin folder. If that succeeds, return the
887 result
= fold_call_stmt (stmt
, ignore
);
893 result
= fold_convert (gimple_call_return_type (stmt
), result
);
897 /* Ignore MD builtins. */
898 callee
= gimple_call_fndecl (stmt
);
899 if (DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_MD
)
902 /* Give up for always_inline inline builtins until they are
904 if (avoid_folding_inline_builtin (callee
))
907 /* If the builtin could not be folded, and it has no argument list,
909 nargs
= gimple_call_num_args (stmt
);
913 /* Limit the work only for builtins we know how to simplify. */
914 switch (DECL_FUNCTION_CODE (callee
))
916 case BUILT_IN_STRLEN
:
918 case BUILT_IN_FPUTS_UNLOCKED
:
922 case BUILT_IN_STRCPY
:
923 case BUILT_IN_STRNCPY
:
924 case BUILT_IN_STRCAT
:
928 case BUILT_IN_MEMCPY_CHK
:
929 case BUILT_IN_MEMPCPY_CHK
:
930 case BUILT_IN_MEMMOVE_CHK
:
931 case BUILT_IN_MEMSET_CHK
:
932 case BUILT_IN_STRNCPY_CHK
:
933 case BUILT_IN_STPNCPY_CHK
:
937 case BUILT_IN_STRCPY_CHK
:
938 case BUILT_IN_STPCPY_CHK
:
942 case BUILT_IN_SNPRINTF_CHK
:
943 case BUILT_IN_VSNPRINTF_CHK
:
951 if (arg_idx
>= nargs
)
954 /* Try to use the dataflow information gathered by the CCP process. */
955 visited
= BITMAP_ALLOC (NULL
);
956 bitmap_clear (visited
);
958 memset (val
, 0, sizeof (val
));
959 a
= gimple_call_arg (stmt
, arg_idx
);
960 if (!get_maxval_strlen (a
, &val
[arg_idx
], visited
, type
))
961 val
[arg_idx
] = NULL_TREE
;
963 BITMAP_FREE (visited
);
966 switch (DECL_FUNCTION_CODE (callee
))
968 case BUILT_IN_STRLEN
:
969 if (val
[0] && nargs
== 1)
972 fold_convert (TREE_TYPE (gimple_call_lhs (stmt
)), val
[0]);
974 /* If the result is not a valid gimple value, or not a cast
975 of a valid gimple value, then we cannot use the result. */
976 if (is_gimple_val (new_val
)
977 || (CONVERT_EXPR_P (new_val
)
978 && is_gimple_val (TREE_OPERAND (new_val
, 0))))
983 case BUILT_IN_STRCPY
:
984 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 2)
985 result
= fold_builtin_strcpy (loc
, callee
,
986 gimple_call_arg (stmt
, 0),
987 gimple_call_arg (stmt
, 1),
991 case BUILT_IN_STRNCPY
:
992 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 3)
993 result
= fold_builtin_strncpy (loc
, callee
,
994 gimple_call_arg (stmt
, 0),
995 gimple_call_arg (stmt
, 1),
996 gimple_call_arg (stmt
, 2),
1000 case BUILT_IN_STRCAT
:
1001 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 2)
1002 result
= fold_builtin_strcat (loc
, gimple_call_arg (stmt
, 0),
1003 gimple_call_arg (stmt
, 1),
1007 case BUILT_IN_FPUTS
:
1009 result
= fold_builtin_fputs (loc
, gimple_call_arg (stmt
, 0),
1010 gimple_call_arg (stmt
, 1),
1011 ignore
, false, val
[0]);
1014 case BUILT_IN_FPUTS_UNLOCKED
:
1016 result
= fold_builtin_fputs (loc
, gimple_call_arg (stmt
, 0),
1017 gimple_call_arg (stmt
, 1),
1018 ignore
, true, val
[0]);
1021 case BUILT_IN_MEMCPY_CHK
:
1022 case BUILT_IN_MEMPCPY_CHK
:
1023 case BUILT_IN_MEMMOVE_CHK
:
1024 case BUILT_IN_MEMSET_CHK
:
1025 if (val
[2] && is_gimple_val (val
[2]) && nargs
== 4)
1026 result
= fold_builtin_memory_chk (loc
, callee
,
1027 gimple_call_arg (stmt
, 0),
1028 gimple_call_arg (stmt
, 1),
1029 gimple_call_arg (stmt
, 2),
1030 gimple_call_arg (stmt
, 3),
1032 DECL_FUNCTION_CODE (callee
));
1035 case BUILT_IN_STRCPY_CHK
:
1036 case BUILT_IN_STPCPY_CHK
:
1037 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 3)
1038 result
= fold_builtin_stxcpy_chk (loc
, callee
,
1039 gimple_call_arg (stmt
, 0),
1040 gimple_call_arg (stmt
, 1),
1041 gimple_call_arg (stmt
, 2),
1043 DECL_FUNCTION_CODE (callee
));
1046 case BUILT_IN_STRNCPY_CHK
:
1047 case BUILT_IN_STPNCPY_CHK
:
1048 if (val
[2] && is_gimple_val (val
[2]) && nargs
== 4)
1049 result
= fold_builtin_stxncpy_chk (loc
, gimple_call_arg (stmt
, 0),
1050 gimple_call_arg (stmt
, 1),
1051 gimple_call_arg (stmt
, 2),
1052 gimple_call_arg (stmt
, 3),
1054 DECL_FUNCTION_CODE (callee
));
1057 case BUILT_IN_SNPRINTF_CHK
:
1058 case BUILT_IN_VSNPRINTF_CHK
:
1059 if (val
[1] && is_gimple_val (val
[1]))
1060 result
= gimple_fold_builtin_snprintf_chk (stmt
, val
[1],
1061 DECL_FUNCTION_CODE (callee
));
1068 if (result
&& ignore
)
1069 result
= fold_ignored_result (result
);
/* NOTE(review): damaged extraction — interior lines (braces, returns,
   the `fld` declaration, loop header around the sub-object walk) are
   missing; preserved byte-for-byte.  TODO: restore from upstream
   gimple-fold.c before editing.
   Purpose (per the surviving doc comment): from an ADDR_EXPR constant
   CST, locate the sub-object of EXPECTED_TYPE it refers to by walking
   FIELD_DECLs by bit position, and return that type's binfo for
   devirtualization — only when the binfo has no base binfos and the
   sub-object is not an artificial (ancestor) field.  */
1074 /* Return a binfo to be used for devirtualization of calls based on an object
1075 represented by a declaration (i.e. a global or automatically allocated one)
1076 or NULL if it cannot be found or is not safe. CST is expected to be an
1077 ADDR_EXPR of such object or the function will return NULL. Currently it is
1078 safe to use such binfo only if it has no base binfo (i.e. no ancestors)
1079 EXPECTED_TYPE is type of the class virtual belongs to. */
1082 gimple_extract_devirt_binfo_from_cst (tree cst
, tree expected_type
)
1084 HOST_WIDE_INT offset
, size
, max_size
;
1085 tree base
, type
, binfo
;
1086 bool last_artificial
= false;
1088 if (!flag_devirtualize
1089 || TREE_CODE (cst
) != ADDR_EXPR
1090 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst
))) != RECORD_TYPE
)
1093 cst
= TREE_OPERAND (cst
, 0);
1094 base
= get_ref_base_and_extent (cst
, &offset
, &size
, &max_size
);
1095 type
= TREE_TYPE (base
);
1099 || TREE_CODE (type
) != RECORD_TYPE
)
1102 /* Find the sub-object the constant actually refers to and mark whether it is
1103 an artificial one (as opposed to a user-defined one). */
1106 HOST_WIDE_INT pos
, size
;
1109 if (types_same_for_odr (type
, expected_type
))
1114 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
1116 if (TREE_CODE (fld
) != FIELD_DECL
)
1119 pos
= int_bit_position (fld
);
1120 size
= tree_to_uhwi (DECL_SIZE (fld
));
1121 if (pos
<= offset
&& (pos
+ size
) > offset
)
1124 if (!fld
|| TREE_CODE (TREE_TYPE (fld
)) != RECORD_TYPE
)
1127 last_artificial
= DECL_ARTIFICIAL (fld
);
1128 type
= TREE_TYPE (fld
);
1131 /* Artificial sub-objects are ancestors, we do not want to use them for
1132 devirtualization, at least not here. */
1133 if (last_artificial
)
1135 binfo
= TYPE_BINFO (type
);
1136 if (!binfo
|| BINFO_N_BASE_BINFOS (binfo
) > 0)
/* NOTE(review): damaged extraction — interior lines (braces, returns,
   declarations such as `i`, `callee`, `final`, the fprintf opening a
   dump message) are missing; preserved byte-for-byte.  TODO: restore
   from upstream gimple-fold.c before editing.
   Purpose (per the surviving doc comment): fold the call at GSI —
   simplify *& in arguments, convert OBJ_TYPE_REF virtual calls that
   became direct (with a dump-file diagnostic for inconsistent
   devirtualization), devirtualize via
   possible_polymorphic_call_targets (substituting
   BUILT_IN_UNREACHABLE and dropping/defaulting the lhs when the call
   becomes noreturn or has no target), and fold builtins via
   gimple_fold_builtin or the target hook.  */
1142 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1143 The statement may be replaced by another statement, e.g., if the call
1144 simplifies to a constant value. Return true if any changes were made.
1145 It is assumed that the operands have been previously folded. */
1148 gimple_fold_call (gimple_stmt_iterator
*gsi
, bool inplace
)
1150 gimple stmt
= gsi_stmt (*gsi
);
1152 bool changed
= false;
1155 /* Fold *& in call arguments. */
1156 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
1157 if (REFERENCE_CLASS_P (gimple_call_arg (stmt
, i
)))
1159 tree tmp
= maybe_fold_reference (gimple_call_arg (stmt
, i
), false);
1162 gimple_call_set_arg (stmt
, i
, tmp
);
1167 /* Check for virtual calls that became direct calls. */
1168 callee
= gimple_call_fn (stmt
);
1169 if (callee
&& TREE_CODE (callee
) == OBJ_TYPE_REF
)
1171 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee
)) != NULL_TREE
)
1173 if (dump_file
&& virtual_method_call_p (callee
)
1174 && !possible_polymorphic_call_target_p
1175 (callee
, cgraph_get_node (gimple_call_addr_fndecl
1176 (OBJ_TYPE_REF_EXPR (callee
)))))
1179 "Type inheritance inconsistent devirtualization of ");
1180 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
1181 fprintf (dump_file
, " to ");
1182 print_generic_expr (dump_file
, callee
, TDF_SLIM
);
1183 fprintf (dump_file
, "\n");
1186 gimple_call_set_fn (stmt
, OBJ_TYPE_REF_EXPR (callee
));
1189 else if (flag_devirtualize
&& !inplace
&& virtual_method_call_p (callee
))
1192 vec
<cgraph_node
*>targets
1193 = possible_polymorphic_call_targets (callee
, &final
);
1194 if (final
&& targets
.length () <= 1)
1196 tree lhs
= gimple_call_lhs (stmt
);
1197 if (targets
.length () == 1)
1199 gimple_call_set_fndecl (stmt
, targets
[0]->decl
);
1201 /* If the call becomes noreturn, remove the lhs. */
1202 if (lhs
&& (gimple_call_flags (stmt
) & ECF_NORETURN
))
1204 if (TREE_CODE (lhs
) == SSA_NAME
)
1206 tree var
= create_tmp_var (TREE_TYPE (lhs
), NULL
);
1207 tree def
= get_or_create_ssa_default_def (cfun
, var
);
1208 gimple new_stmt
= gimple_build_assign (lhs
, def
);
1209 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1211 gimple_call_set_lhs (stmt
, NULL_TREE
);
1216 tree fndecl
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
1217 gimple new_stmt
= gimple_build_call (fndecl
, 0);
1218 gimple_set_location (new_stmt
, gimple_location (stmt
));
1219 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
1221 tree var
= create_tmp_var (TREE_TYPE (lhs
), NULL
);
1222 tree def
= get_or_create_ssa_default_def (cfun
, var
);
1223 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1224 update_call_from_tree (gsi
, def
);
1227 gsi_replace (gsi
, new_stmt
, true);
1237 /* Check for builtins that CCP can handle using information not
1238 available in the generic fold routines. */
1239 if (gimple_call_builtin_p (stmt
))
1241 tree result
= gimple_fold_builtin (stmt
);
1244 if (!update_call_from_tree (gsi
, result
))
1245 gimplify_and_update_call_from_tree (gsi
, result
);
1248 else if (gimple_call_builtin_p (stmt
, BUILT_IN_MD
))
1249 changed
|= targetm
.gimple_fold_builtin (gsi
);
1255 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1256 distinguishes both cases. */
1259 fold_stmt_1 (gimple_stmt_iterator
*gsi
, bool inplace
)
1261 bool changed
= false;
1262 gimple stmt
= gsi_stmt (*gsi
);
1265 /* Fold the main computation performed by the statement. */
1266 switch (gimple_code (stmt
))
1270 unsigned old_num_ops
= gimple_num_ops (stmt
);
1271 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
1272 tree lhs
= gimple_assign_lhs (stmt
);
1274 /* First canonicalize operand order. This avoids building new
1275 trees if this is the only thing fold would later do. */
1276 if ((commutative_tree_code (subcode
)
1277 || commutative_ternary_tree_code (subcode
))
1278 && tree_swap_operands_p (gimple_assign_rhs1 (stmt
),
1279 gimple_assign_rhs2 (stmt
), false))
1281 tree tem
= gimple_assign_rhs1 (stmt
);
1282 gimple_assign_set_rhs1 (stmt
, gimple_assign_rhs2 (stmt
));
1283 gimple_assign_set_rhs2 (stmt
, tem
);
1286 new_rhs
= fold_gimple_assign (gsi
);
1288 && !useless_type_conversion_p (TREE_TYPE (lhs
),
1289 TREE_TYPE (new_rhs
)))
1290 new_rhs
= fold_convert (TREE_TYPE (lhs
), new_rhs
);
1293 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs
)) < old_num_ops
))
1295 gimple_assign_set_rhs_from_tree (gsi
, new_rhs
);
1302 changed
|= fold_gimple_cond (stmt
);
1306 changed
|= gimple_fold_call (gsi
, inplace
);
1310 /* Fold *& in asm operands. */
1313 const char **oconstraints
;
1314 const char *constraint
;
1315 bool allows_mem
, allows_reg
;
1317 noutputs
= gimple_asm_noutputs (stmt
);
1318 oconstraints
= XALLOCAVEC (const char *, noutputs
);
1320 for (i
= 0; i
< gimple_asm_noutputs (stmt
); ++i
)
1322 tree link
= gimple_asm_output_op (stmt
, i
);
1323 tree op
= TREE_VALUE (link
);
1325 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
1326 if (REFERENCE_CLASS_P (op
)
1327 && (op
= maybe_fold_reference (op
, true)) != NULL_TREE
)
1329 TREE_VALUE (link
) = op
;
1333 for (i
= 0; i
< gimple_asm_ninputs (stmt
); ++i
)
1335 tree link
= gimple_asm_input_op (stmt
, i
);
1336 tree op
= TREE_VALUE (link
);
1338 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
1339 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
1340 oconstraints
, &allows_mem
, &allows_reg
);
1341 if (REFERENCE_CLASS_P (op
)
1342 && (op
= maybe_fold_reference (op
, !allows_reg
&& allows_mem
))
1345 TREE_VALUE (link
) = op
;
1353 if (gimple_debug_bind_p (stmt
))
1355 tree val
= gimple_debug_bind_get_value (stmt
);
1357 && REFERENCE_CLASS_P (val
))
1359 tree tem
= maybe_fold_reference (val
, false);
1362 gimple_debug_bind_set_value (stmt
, tem
);
1367 && TREE_CODE (val
) == ADDR_EXPR
)
1369 tree ref
= TREE_OPERAND (val
, 0);
1370 tree tem
= maybe_fold_reference (ref
, false);
1373 tem
= build_fold_addr_expr_with_type (tem
, TREE_TYPE (val
));
1374 gimple_debug_bind_set_value (stmt
, tem
);
1384 stmt
= gsi_stmt (*gsi
);
1386 /* Fold *& on the lhs. */
1387 if (gimple_has_lhs (stmt
))
1389 tree lhs
= gimple_get_lhs (stmt
);
1390 if (lhs
&& REFERENCE_CLASS_P (lhs
))
1392 tree new_lhs
= maybe_fold_reference (lhs
, true);
1395 gimple_set_lhs (stmt
, new_lhs
);
1404 /* Fold the statement pointed to by GSI. In some cases, this function may
1405 replace the whole statement with a new one. Returns true iff folding
1407 The statement pointed to by GSI should be in valid gimple form but may
1408 be in unfolded state as resulting from for example constant propagation
1409 which can produce *&x = 0. */
1412 fold_stmt (gimple_stmt_iterator
*gsi
)
1414 return fold_stmt_1 (gsi
, false);
1417 /* Perform the minimal folding on statement *GSI. Only operations like
1418 *&x created by constant propagation are handled. The statement cannot
1419 be replaced with a new one. Return true if the statement was
1420 changed, false otherwise.
1421 The statement *GSI should be in valid gimple form but may
1422 be in unfolded state as resulting from for example constant propagation
1423 which can produce *&x = 0. */
1426 fold_stmt_inplace (gimple_stmt_iterator
*gsi
)
1428 gimple stmt
= gsi_stmt (*gsi
);
1429 bool changed
= fold_stmt_1 (gsi
, true);
1430 gcc_assert (gsi_stmt (*gsi
) == stmt
);
1434 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1435 if EXPR is null or we don't know how.
1436 If non-null, the result always has boolean type. */
1439 canonicalize_bool (tree expr
, bool invert
)
1445 if (integer_nonzerop (expr
))
1446 return boolean_false_node
;
1447 else if (integer_zerop (expr
))
1448 return boolean_true_node
;
1449 else if (TREE_CODE (expr
) == SSA_NAME
)
1450 return fold_build2 (EQ_EXPR
, boolean_type_node
, expr
,
1451 build_int_cst (TREE_TYPE (expr
), 0));
1452 else if (TREE_CODE_CLASS (TREE_CODE (expr
)) == tcc_comparison
)
1453 return fold_build2 (invert_tree_comparison (TREE_CODE (expr
), false),
1455 TREE_OPERAND (expr
, 0),
1456 TREE_OPERAND (expr
, 1));
1462 if (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
1464 if (integer_nonzerop (expr
))
1465 return boolean_true_node
;
1466 else if (integer_zerop (expr
))
1467 return boolean_false_node
;
1468 else if (TREE_CODE (expr
) == SSA_NAME
)
1469 return fold_build2 (NE_EXPR
, boolean_type_node
, expr
,
1470 build_int_cst (TREE_TYPE (expr
), 0));
1471 else if (TREE_CODE_CLASS (TREE_CODE (expr
)) == tcc_comparison
)
1472 return fold_build2 (TREE_CODE (expr
),
1474 TREE_OPERAND (expr
, 0),
1475 TREE_OPERAND (expr
, 1));
1481 /* Check to see if a boolean expression EXPR is logically equivalent to the
1482 comparison (OP1 CODE OP2). Check for various identities involving
1486 same_bool_comparison_p (const_tree expr
, enum tree_code code
,
1487 const_tree op1
, const_tree op2
)
1491 /* The obvious case. */
1492 if (TREE_CODE (expr
) == code
1493 && operand_equal_p (TREE_OPERAND (expr
, 0), op1
, 0)
1494 && operand_equal_p (TREE_OPERAND (expr
, 1), op2
, 0))
1497 /* Check for comparing (name, name != 0) and the case where expr
1498 is an SSA_NAME with a definition matching the comparison. */
1499 if (TREE_CODE (expr
) == SSA_NAME
1500 && TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
1502 if (operand_equal_p (expr
, op1
, 0))
1503 return ((code
== NE_EXPR
&& integer_zerop (op2
))
1504 || (code
== EQ_EXPR
&& integer_nonzerop (op2
)));
1505 s
= SSA_NAME_DEF_STMT (expr
);
1506 if (is_gimple_assign (s
)
1507 && gimple_assign_rhs_code (s
) == code
1508 && operand_equal_p (gimple_assign_rhs1 (s
), op1
, 0)
1509 && operand_equal_p (gimple_assign_rhs2 (s
), op2
, 0))
1513 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1514 of name is a comparison, recurse. */
1515 if (TREE_CODE (op1
) == SSA_NAME
1516 && TREE_CODE (TREE_TYPE (op1
)) == BOOLEAN_TYPE
)
1518 s
= SSA_NAME_DEF_STMT (op1
);
1519 if (is_gimple_assign (s
)
1520 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
)
1522 enum tree_code c
= gimple_assign_rhs_code (s
);
1523 if ((c
== NE_EXPR
&& integer_zerop (op2
))
1524 || (c
== EQ_EXPR
&& integer_nonzerop (op2
)))
1525 return same_bool_comparison_p (expr
, c
,
1526 gimple_assign_rhs1 (s
),
1527 gimple_assign_rhs2 (s
));
1528 if ((c
== EQ_EXPR
&& integer_zerop (op2
))
1529 || (c
== NE_EXPR
&& integer_nonzerop (op2
)))
1530 return same_bool_comparison_p (expr
,
1531 invert_tree_comparison (c
, false),
1532 gimple_assign_rhs1 (s
),
1533 gimple_assign_rhs2 (s
));
1539 /* Check to see if two boolean expressions OP1 and OP2 are logically
1543 same_bool_result_p (const_tree op1
, const_tree op2
)
1545 /* Simple cases first. */
1546 if (operand_equal_p (op1
, op2
, 0))
1549 /* Check the cases where at least one of the operands is a comparison.
1550 These are a bit smarter than operand_equal_p in that they apply some
1551 identifies on SSA_NAMEs. */
1552 if (TREE_CODE_CLASS (TREE_CODE (op2
)) == tcc_comparison
1553 && same_bool_comparison_p (op1
, TREE_CODE (op2
),
1554 TREE_OPERAND (op2
, 0),
1555 TREE_OPERAND (op2
, 1)))
1557 if (TREE_CODE_CLASS (TREE_CODE (op1
)) == tcc_comparison
1558 && same_bool_comparison_p (op2
, TREE_CODE (op1
),
1559 TREE_OPERAND (op1
, 0),
1560 TREE_OPERAND (op1
, 1)))
1567 /* Forward declarations for some mutually recursive functions. */
1570 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
1571 enum tree_code code2
, tree op2a
, tree op2b
);
1573 and_var_with_comparison (tree var
, bool invert
,
1574 enum tree_code code2
, tree op2a
, tree op2b
);
1576 and_var_with_comparison_1 (gimple stmt
,
1577 enum tree_code code2
, tree op2a
, tree op2b
);
1579 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
1580 enum tree_code code2
, tree op2a
, tree op2b
);
1582 or_var_with_comparison (tree var
, bool invert
,
1583 enum tree_code code2
, tree op2a
, tree op2b
);
1585 or_var_with_comparison_1 (gimple stmt
,
1586 enum tree_code code2
, tree op2a
, tree op2b
);
1588 /* Helper function for and_comparisons_1: try to simplify the AND of the
1589 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1590 If INVERT is true, invert the value of the VAR before doing the AND.
1591 Return NULL_EXPR if we can't simplify this to a single expression. */
1594 and_var_with_comparison (tree var
, bool invert
,
1595 enum tree_code code2
, tree op2a
, tree op2b
)
1598 gimple stmt
= SSA_NAME_DEF_STMT (var
);
1600 /* We can only deal with variables whose definitions are assignments. */
1601 if (!is_gimple_assign (stmt
))
1604 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1605 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1606 Then we only have to consider the simpler non-inverted cases. */
1608 t
= or_var_with_comparison_1 (stmt
,
1609 invert_tree_comparison (code2
, false),
1612 t
= and_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
1613 return canonicalize_bool (t
, invert
);
1616 /* Try to simplify the AND of the ssa variable defined by the assignment
1617 STMT with the comparison specified by (OP2A CODE2 OP2B).
1618 Return NULL_EXPR if we can't simplify this to a single expression. */
1621 and_var_with_comparison_1 (gimple stmt
,
1622 enum tree_code code2
, tree op2a
, tree op2b
)
1624 tree var
= gimple_assign_lhs (stmt
);
1625 tree true_test_var
= NULL_TREE
;
1626 tree false_test_var
= NULL_TREE
;
1627 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
1629 /* Check for identities like (var AND (var == 0)) => false. */
1630 if (TREE_CODE (op2a
) == SSA_NAME
1631 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
1633 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
1634 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
1636 true_test_var
= op2a
;
1637 if (var
== true_test_var
)
1640 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
1641 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
1643 false_test_var
= op2a
;
1644 if (var
== false_test_var
)
1645 return boolean_false_node
;
1649 /* If the definition is a comparison, recurse on it. */
1650 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
1652 tree t
= and_comparisons_1 (innercode
,
1653 gimple_assign_rhs1 (stmt
),
1654 gimple_assign_rhs2 (stmt
),
1662 /* If the definition is an AND or OR expression, we may be able to
1663 simplify by reassociating. */
1664 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
1665 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
1667 tree inner1
= gimple_assign_rhs1 (stmt
);
1668 tree inner2
= gimple_assign_rhs2 (stmt
);
1671 tree partial
= NULL_TREE
;
1672 bool is_and
= (innercode
== BIT_AND_EXPR
);
1674 /* Check for boolean identities that don't require recursive examination
1676 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1677 inner1 AND (inner1 OR inner2) => inner1
1678 !inner1 AND (inner1 AND inner2) => false
1679 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1680 Likewise for similar cases involving inner2. */
1681 if (inner1
== true_test_var
)
1682 return (is_and
? var
: inner1
);
1683 else if (inner2
== true_test_var
)
1684 return (is_and
? var
: inner2
);
1685 else if (inner1
== false_test_var
)
1687 ? boolean_false_node
1688 : and_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
1689 else if (inner2
== false_test_var
)
1691 ? boolean_false_node
1692 : and_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
1694 /* Next, redistribute/reassociate the AND across the inner tests.
1695 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1696 if (TREE_CODE (inner1
) == SSA_NAME
1697 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
1698 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
1699 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
1700 gimple_assign_rhs1 (s
),
1701 gimple_assign_rhs2 (s
),
1702 code2
, op2a
, op2b
)))
1704 /* Handle the AND case, where we are reassociating:
1705 (inner1 AND inner2) AND (op2a code2 op2b)
1707 If the partial result t is a constant, we win. Otherwise
1708 continue on to try reassociating with the other inner test. */
1711 if (integer_onep (t
))
1713 else if (integer_zerop (t
))
1714 return boolean_false_node
;
1717 /* Handle the OR case, where we are redistributing:
1718 (inner1 OR inner2) AND (op2a code2 op2b)
1719 => (t OR (inner2 AND (op2a code2 op2b))) */
1720 else if (integer_onep (t
))
1721 return boolean_true_node
;
1723 /* Save partial result for later. */
1727 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1728 if (TREE_CODE (inner2
) == SSA_NAME
1729 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
1730 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
1731 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
1732 gimple_assign_rhs1 (s
),
1733 gimple_assign_rhs2 (s
),
1734 code2
, op2a
, op2b
)))
1736 /* Handle the AND case, where we are reassociating:
1737 (inner1 AND inner2) AND (op2a code2 op2b)
1738 => (inner1 AND t) */
1741 if (integer_onep (t
))
1743 else if (integer_zerop (t
))
1744 return boolean_false_node
;
1745 /* If both are the same, we can apply the identity
1747 else if (partial
&& same_bool_result_p (t
, partial
))
1751 /* Handle the OR case. where we are redistributing:
1752 (inner1 OR inner2) AND (op2a code2 op2b)
1753 => (t OR (inner1 AND (op2a code2 op2b)))
1754 => (t OR partial) */
1757 if (integer_onep (t
))
1758 return boolean_true_node
;
1761 /* We already got a simplification for the other
1762 operand to the redistributed OR expression. The
1763 interesting case is when at least one is false.
1764 Or, if both are the same, we can apply the identity
1766 if (integer_zerop (partial
))
1768 else if (integer_zerop (t
))
1770 else if (same_bool_result_p (t
, partial
))
1779 /* Try to simplify the AND of two comparisons defined by
1780 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1781 If this can be done without constructing an intermediate value,
1782 return the resulting tree; otherwise NULL_TREE is returned.
1783 This function is deliberately asymmetric as it recurses on SSA_DEFs
1784 in the first comparison but not the second. */
1787 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
1788 enum tree_code code2
, tree op2a
, tree op2b
)
1790 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
1792 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1793 if (operand_equal_p (op1a
, op2a
, 0)
1794 && operand_equal_p (op1b
, op2b
, 0))
1796 /* Result will be either NULL_TREE, or a combined comparison. */
1797 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
1798 TRUTH_ANDIF_EXPR
, code1
, code2
,
1799 truth_type
, op1a
, op1b
);
1804 /* Likewise the swapped case of the above. */
1805 if (operand_equal_p (op1a
, op2b
, 0)
1806 && operand_equal_p (op1b
, op2a
, 0))
1808 /* Result will be either NULL_TREE, or a combined comparison. */
1809 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
1810 TRUTH_ANDIF_EXPR
, code1
,
1811 swap_tree_comparison (code2
),
1812 truth_type
, op1a
, op1b
);
1817 /* If both comparisons are of the same value against constants, we might
1818 be able to merge them. */
1819 if (operand_equal_p (op1a
, op2a
, 0)
1820 && TREE_CODE (op1b
) == INTEGER_CST
1821 && TREE_CODE (op2b
) == INTEGER_CST
)
1823 int cmp
= tree_int_cst_compare (op1b
, op2b
);
1825 /* If we have (op1a == op1b), we should either be able to
1826 return that or FALSE, depending on whether the constant op1b
1827 also satisfies the other comparison against op2b. */
1828 if (code1
== EQ_EXPR
)
1834 case EQ_EXPR
: val
= (cmp
== 0); break;
1835 case NE_EXPR
: val
= (cmp
!= 0); break;
1836 case LT_EXPR
: val
= (cmp
< 0); break;
1837 case GT_EXPR
: val
= (cmp
> 0); break;
1838 case LE_EXPR
: val
= (cmp
<= 0); break;
1839 case GE_EXPR
: val
= (cmp
>= 0); break;
1840 default: done
= false;
1845 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1847 return boolean_false_node
;
1850 /* Likewise if the second comparison is an == comparison. */
1851 else if (code2
== EQ_EXPR
)
1857 case EQ_EXPR
: val
= (cmp
== 0); break;
1858 case NE_EXPR
: val
= (cmp
!= 0); break;
1859 case LT_EXPR
: val
= (cmp
> 0); break;
1860 case GT_EXPR
: val
= (cmp
< 0); break;
1861 case LE_EXPR
: val
= (cmp
>= 0); break;
1862 case GE_EXPR
: val
= (cmp
<= 0); break;
1863 default: done
= false;
1868 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1870 return boolean_false_node
;
1874 /* Same business with inequality tests. */
1875 else if (code1
== NE_EXPR
)
1880 case EQ_EXPR
: val
= (cmp
!= 0); break;
1881 case NE_EXPR
: val
= (cmp
== 0); break;
1882 case LT_EXPR
: val
= (cmp
>= 0); break;
1883 case GT_EXPR
: val
= (cmp
<= 0); break;
1884 case LE_EXPR
: val
= (cmp
> 0); break;
1885 case GE_EXPR
: val
= (cmp
< 0); break;
1890 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1892 else if (code2
== NE_EXPR
)
1897 case EQ_EXPR
: val
= (cmp
== 0); break;
1898 case NE_EXPR
: val
= (cmp
!= 0); break;
1899 case LT_EXPR
: val
= (cmp
<= 0); break;
1900 case GT_EXPR
: val
= (cmp
>= 0); break;
1901 case LE_EXPR
: val
= (cmp
< 0); break;
1902 case GE_EXPR
: val
= (cmp
> 0); break;
1907 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1910 /* Chose the more restrictive of two < or <= comparisons. */
1911 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
1912 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
1914 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
1915 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1917 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1920 /* Likewise chose the more restrictive of two > or >= comparisons. */
1921 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
1922 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
1924 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
1925 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1927 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1930 /* Check for singleton ranges. */
1932 && ((code1
== LE_EXPR
&& code2
== GE_EXPR
)
1933 || (code1
== GE_EXPR
&& code2
== LE_EXPR
)))
1934 return fold_build2 (EQ_EXPR
, boolean_type_node
, op1a
, op2b
);
1936 /* Check for disjoint ranges. */
1938 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
1939 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
1940 return boolean_false_node
;
1942 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
1943 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
1944 return boolean_false_node
;
1947 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1948 NAME's definition is a truth value. See if there are any simplifications
1949 that can be done against the NAME's definition. */
1950 if (TREE_CODE (op1a
) == SSA_NAME
1951 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
1952 && (integer_zerop (op1b
) || integer_onep (op1b
)))
1954 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
1955 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
1956 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
1957 switch (gimple_code (stmt
))
1960 /* Try to simplify by copy-propagating the definition. */
1961 return and_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
1964 /* If every argument to the PHI produces the same result when
1965 ANDed with the second comparison, we win.
1966 Do not do this unless the type is bool since we need a bool
1967 result here anyway. */
1968 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
1970 tree result
= NULL_TREE
;
1972 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
1974 tree arg
= gimple_phi_arg_def (stmt
, i
);
1976 /* If this PHI has itself as an argument, ignore it.
1977 If all the other args produce the same result,
1979 if (arg
== gimple_phi_result (stmt
))
1981 else if (TREE_CODE (arg
) == INTEGER_CST
)
1983 if (invert
? integer_nonzerop (arg
) : integer_zerop (arg
))
1986 result
= boolean_false_node
;
1987 else if (!integer_zerop (result
))
1991 result
= fold_build2 (code2
, boolean_type_node
,
1993 else if (!same_bool_comparison_p (result
,
1997 else if (TREE_CODE (arg
) == SSA_NAME
1998 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
2001 gimple def_stmt
= SSA_NAME_DEF_STMT (arg
);
2002 /* In simple cases we can look through PHI nodes,
2003 but we have to be careful with loops.
2005 if (! dom_info_available_p (CDI_DOMINATORS
)
2006 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
2007 || dominated_by_p (CDI_DOMINATORS
,
2008 gimple_bb (def_stmt
),
2011 temp
= and_var_with_comparison (arg
, invert
, code2
,
2017 else if (!same_bool_result_p (result
, temp
))
2033 /* Try to simplify the AND of two comparisons, specified by
2034 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2035 If this can be simplified to a single expression (without requiring
2036 introducing more SSA variables to hold intermediate values),
2037 return the resulting tree. Otherwise return NULL_TREE.
2038 If the result expression is non-null, it has boolean type. */
2041 maybe_fold_and_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
2042 enum tree_code code2
, tree op2a
, tree op2b
)
2044 tree t
= and_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
2048 return and_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
2051 /* Helper function for or_comparisons_1: try to simplify the OR of the
2052 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2053 If INVERT is true, invert the value of VAR before doing the OR.
2054 Return NULL_EXPR if we can't simplify this to a single expression. */
2057 or_var_with_comparison (tree var
, bool invert
,
2058 enum tree_code code2
, tree op2a
, tree op2b
)
2061 gimple stmt
= SSA_NAME_DEF_STMT (var
);
2063 /* We can only deal with variables whose definitions are assignments. */
2064 if (!is_gimple_assign (stmt
))
2067 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2068 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2069 Then we only have to consider the simpler non-inverted cases. */
2071 t
= and_var_with_comparison_1 (stmt
,
2072 invert_tree_comparison (code2
, false),
2075 t
= or_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
2076 return canonicalize_bool (t
, invert
);
2079 /* Try to simplify the OR of the ssa variable defined by the assignment
2080 STMT with the comparison specified by (OP2A CODE2 OP2B).
2081 Return NULL_EXPR if we can't simplify this to a single expression. */
2084 or_var_with_comparison_1 (gimple stmt
,
2085 enum tree_code code2
, tree op2a
, tree op2b
)
2087 tree var
= gimple_assign_lhs (stmt
);
2088 tree true_test_var
= NULL_TREE
;
2089 tree false_test_var
= NULL_TREE
;
2090 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
2092 /* Check for identities like (var OR (var != 0)) => true . */
2093 if (TREE_CODE (op2a
) == SSA_NAME
2094 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
2096 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
2097 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
2099 true_test_var
= op2a
;
2100 if (var
== true_test_var
)
2103 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
2104 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
2106 false_test_var
= op2a
;
2107 if (var
== false_test_var
)
2108 return boolean_true_node
;
2112 /* If the definition is a comparison, recurse on it. */
2113 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
2115 tree t
= or_comparisons_1 (innercode
,
2116 gimple_assign_rhs1 (stmt
),
2117 gimple_assign_rhs2 (stmt
),
2125 /* If the definition is an AND or OR expression, we may be able to
2126 simplify by reassociating. */
2127 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
2128 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
2130 tree inner1
= gimple_assign_rhs1 (stmt
);
2131 tree inner2
= gimple_assign_rhs2 (stmt
);
2134 tree partial
= NULL_TREE
;
2135 bool is_or
= (innercode
== BIT_IOR_EXPR
);
2137 /* Check for boolean identities that don't require recursive examination
2139 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2140 inner1 OR (inner1 AND inner2) => inner1
2141 !inner1 OR (inner1 OR inner2) => true
2142 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2144 if (inner1
== true_test_var
)
2145 return (is_or
? var
: inner1
);
2146 else if (inner2
== true_test_var
)
2147 return (is_or
? var
: inner2
);
2148 else if (inner1
== false_test_var
)
2151 : or_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
2152 else if (inner2
== false_test_var
)
2155 : or_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
2157 /* Next, redistribute/reassociate the OR across the inner tests.
2158 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2159 if (TREE_CODE (inner1
) == SSA_NAME
2160 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
2161 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2162 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
2163 gimple_assign_rhs1 (s
),
2164 gimple_assign_rhs2 (s
),
2165 code2
, op2a
, op2b
)))
2167 /* Handle the OR case, where we are reassociating:
2168 (inner1 OR inner2) OR (op2a code2 op2b)
2170 If the partial result t is a constant, we win. Otherwise
2171 continue on to try reassociating with the other inner test. */
2174 if (integer_onep (t
))
2175 return boolean_true_node
;
2176 else if (integer_zerop (t
))
2180 /* Handle the AND case, where we are redistributing:
2181 (inner1 AND inner2) OR (op2a code2 op2b)
2182 => (t AND (inner2 OR (op2a code op2b))) */
2183 else if (integer_zerop (t
))
2184 return boolean_false_node
;
2186 /* Save partial result for later. */
2190 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2191 if (TREE_CODE (inner2
) == SSA_NAME
2192 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
2193 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2194 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
2195 gimple_assign_rhs1 (s
),
2196 gimple_assign_rhs2 (s
),
2197 code2
, op2a
, op2b
)))
2199 /* Handle the OR case, where we are reassociating:
2200 (inner1 OR inner2) OR (op2a code2 op2b)
2202 => (t OR partial) */
2205 if (integer_zerop (t
))
2207 else if (integer_onep (t
))
2208 return boolean_true_node
;
2209 /* If both are the same, we can apply the identity
2211 else if (partial
&& same_bool_result_p (t
, partial
))
2215 /* Handle the AND case, where we are redistributing:
2216 (inner1 AND inner2) OR (op2a code2 op2b)
2217 => (t AND (inner1 OR (op2a code2 op2b)))
2218 => (t AND partial) */
2221 if (integer_zerop (t
))
2222 return boolean_false_node
;
2225 /* We already got a simplification for the other
2226 operand to the redistributed AND expression. The
2227 interesting case is when at least one is true.
2228 Or, if both are the same, we can apply the identity
2230 if (integer_onep (partial
))
2232 else if (integer_onep (t
))
2234 else if (same_bool_result_p (t
, partial
))
2243 /* Try to simplify the OR of two comparisons defined by
2244 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2245 If this can be done without constructing an intermediate value,
2246 return the resulting tree; otherwise NULL_TREE is returned.
2247 This function is deliberately asymmetric as it recurses on SSA_DEFs
2248 in the first comparison but not the second. */
2251 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
2252 enum tree_code code2
, tree op2a
, tree op2b
)
2254 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
2256 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2257 if (operand_equal_p (op1a
, op2a
, 0)
2258 && operand_equal_p (op1b
, op2b
, 0))
2260 /* Result will be either NULL_TREE, or a combined comparison. */
2261 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2262 TRUTH_ORIF_EXPR
, code1
, code2
,
2263 truth_type
, op1a
, op1b
);
2268 /* Likewise the swapped case of the above. */
2269 if (operand_equal_p (op1a
, op2b
, 0)
2270 && operand_equal_p (op1b
, op2a
, 0))
2272 /* Result will be either NULL_TREE, or a combined comparison. */
2273 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2274 TRUTH_ORIF_EXPR
, code1
,
2275 swap_tree_comparison (code2
),
2276 truth_type
, op1a
, op1b
);
2281 /* If both comparisons are of the same value against constants, we might
2282 be able to merge them. */
2283 if (operand_equal_p (op1a
, op2a
, 0)
2284 && TREE_CODE (op1b
) == INTEGER_CST
2285 && TREE_CODE (op2b
) == INTEGER_CST
)
2287 int cmp
= tree_int_cst_compare (op1b
, op2b
);
2289 /* If we have (op1a != op1b), we should either be able to
2290 return that or TRUE, depending on whether the constant op1b
2291 also satisfies the other comparison against op2b. */
2292 if (code1
== NE_EXPR
)
2298 case EQ_EXPR
: val
= (cmp
== 0); break;
2299 case NE_EXPR
: val
= (cmp
!= 0); break;
2300 case LT_EXPR
: val
= (cmp
< 0); break;
2301 case GT_EXPR
: val
= (cmp
> 0); break;
2302 case LE_EXPR
: val
= (cmp
<= 0); break;
2303 case GE_EXPR
: val
= (cmp
>= 0); break;
2304 default: done
= false;
2309 return boolean_true_node
;
2311 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2314 /* Likewise if the second comparison is a != comparison. */
2315 else if (code2
== NE_EXPR
)
2321 case EQ_EXPR
: val
= (cmp
== 0); break;
2322 case NE_EXPR
: val
= (cmp
!= 0); break;
2323 case LT_EXPR
: val
= (cmp
> 0); break;
2324 case GT_EXPR
: val
= (cmp
< 0); break;
2325 case LE_EXPR
: val
= (cmp
>= 0); break;
2326 case GE_EXPR
: val
= (cmp
<= 0); break;
2327 default: done
= false;
2332 return boolean_true_node
;
2334 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2338 /* See if an equality test is redundant with the other comparison. */
2339 else if (code1
== EQ_EXPR
)
2344 case EQ_EXPR
: val
= (cmp
== 0); break;
2345 case NE_EXPR
: val
= (cmp
!= 0); break;
2346 case LT_EXPR
: val
= (cmp
< 0); break;
2347 case GT_EXPR
: val
= (cmp
> 0); break;
2348 case LE_EXPR
: val
= (cmp
<= 0); break;
2349 case GE_EXPR
: val
= (cmp
>= 0); break;
2354 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2356 else if (code2
== EQ_EXPR
)
2361 case EQ_EXPR
: val
= (cmp
== 0); break;
2362 case NE_EXPR
: val
= (cmp
!= 0); break;
2363 case LT_EXPR
: val
= (cmp
> 0); break;
2364 case GT_EXPR
: val
= (cmp
< 0); break;
2365 case LE_EXPR
: val
= (cmp
>= 0); break;
2366 case GE_EXPR
: val
= (cmp
<= 0); break;
2371 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2374 /* Chose the less restrictive of two < or <= comparisons. */
2375 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
2376 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
2378 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
2379 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2381 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2384 /* Likewise chose the less restrictive of two > or >= comparisons. */
2385 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
2386 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
2388 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
2389 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2391 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2394 /* Check for singleton ranges. */
2396 && ((code1
== LT_EXPR
&& code2
== GT_EXPR
)
2397 || (code1
== GT_EXPR
&& code2
== LT_EXPR
)))
2398 return fold_build2 (NE_EXPR
, boolean_type_node
, op1a
, op2b
);
2400 /* Check for less/greater pairs that don't restrict the range at all. */
2402 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
2403 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
2404 return boolean_true_node
;
2406 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
2407 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
2408 return boolean_true_node
;
2411 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2412 NAME's definition is a truth value. See if there are any simplifications
2413 that can be done against the NAME's definition. */
2414 if (TREE_CODE (op1a
) == SSA_NAME
2415 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
2416 && (integer_zerop (op1b
) || integer_onep (op1b
)))
2418 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
2419 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
2420 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
2421 switch (gimple_code (stmt
))
2424 /* Try to simplify by copy-propagating the definition. */
2425 return or_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
2428 /* If every argument to the PHI produces the same result when
2429 ORed with the second comparison, we win.
2430 Do not do this unless the type is bool since we need a bool
2431 result here anyway. */
2432 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
2434 tree result
= NULL_TREE
;
2436 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
2438 tree arg
= gimple_phi_arg_def (stmt
, i
);
2440 /* If this PHI has itself as an argument, ignore it.
2441 If all the other args produce the same result,
2443 if (arg
== gimple_phi_result (stmt
))
2445 else if (TREE_CODE (arg
) == INTEGER_CST
)
2447 if (invert
? integer_zerop (arg
) : integer_nonzerop (arg
))
2450 result
= boolean_true_node
;
2451 else if (!integer_onep (result
))
2455 result
= fold_build2 (code2
, boolean_type_node
,
2457 else if (!same_bool_comparison_p (result
,
2461 else if (TREE_CODE (arg
) == SSA_NAME
2462 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
2465 gimple def_stmt
= SSA_NAME_DEF_STMT (arg
);
2466 /* In simple cases we can look through PHI nodes,
2467 but we have to be careful with loops.
2469 if (! dom_info_available_p (CDI_DOMINATORS
)
2470 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
2471 || dominated_by_p (CDI_DOMINATORS
,
2472 gimple_bb (def_stmt
),
2475 temp
= or_var_with_comparison (arg
, invert
, code2
,
2481 else if (!same_bool_result_p (result
, temp
))
2497 /* Try to simplify the OR of two comparisons, specified by
2498 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2499 If this can be simplified to a single expression (without requiring
2500 introducing more SSA variables to hold intermediate values),
2501 return the resulting tree. Otherwise return NULL_TREE.
2502 If the result expression is non-null, it has boolean type. */
2505 maybe_fold_or_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
2506 enum tree_code code2
, tree op2a
, tree op2b
)
2508 tree t
= or_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
2512 return or_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
2516 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2518 Either NULL_TREE, a simplified but non-constant or a constant
2521 ??? This should go into a gimple-fold-inline.h file to be eventually
2522 privatized with the single valueize function used in the various TUs
2523 to avoid the indirect function call overhead. */
2526 gimple_fold_stmt_to_constant_1 (gimple stmt
, tree (*valueize
) (tree
))
2528 location_t loc
= gimple_location (stmt
);
2529 switch (gimple_code (stmt
))
2533 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
2535 switch (get_gimple_rhs_class (subcode
))
2537 case GIMPLE_SINGLE_RHS
:
2539 tree rhs
= gimple_assign_rhs1 (stmt
);
2540 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
2542 if (TREE_CODE (rhs
) == SSA_NAME
)
2544 /* If the RHS is an SSA_NAME, return its known constant value,
2546 return (*valueize
) (rhs
);
2548 /* Handle propagating invariant addresses into address
2550 else if (TREE_CODE (rhs
) == ADDR_EXPR
2551 && !is_gimple_min_invariant (rhs
))
2553 HOST_WIDE_INT offset
= 0;
2555 base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs
, 0),
2559 && (CONSTANT_CLASS_P (base
)
2560 || decl_address_invariant_p (base
)))
2561 return build_invariant_address (TREE_TYPE (rhs
),
2564 else if (TREE_CODE (rhs
) == CONSTRUCTOR
2565 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
2566 && (CONSTRUCTOR_NELTS (rhs
)
2567 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
2572 vec
= XALLOCAVEC (tree
,
2573 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
)));
2574 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
2576 val
= (*valueize
) (val
);
2577 if (TREE_CODE (val
) == INTEGER_CST
2578 || TREE_CODE (val
) == REAL_CST
2579 || TREE_CODE (val
) == FIXED_CST
)
2585 return build_vector (TREE_TYPE (rhs
), vec
);
2587 if (subcode
== OBJ_TYPE_REF
)
2589 tree val
= (*valueize
) (OBJ_TYPE_REF_EXPR (rhs
));
2590 /* If callee is constant, we can fold away the wrapper. */
2591 if (is_gimple_min_invariant (val
))
2595 if (kind
== tcc_reference
)
2597 if ((TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
2598 || TREE_CODE (rhs
) == REALPART_EXPR
2599 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
2600 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
2602 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
2603 return fold_unary_loc (EXPR_LOCATION (rhs
),
2605 TREE_TYPE (rhs
), val
);
2607 else if (TREE_CODE (rhs
) == BIT_FIELD_REF
2608 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
2610 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
2611 return fold_ternary_loc (EXPR_LOCATION (rhs
),
2613 TREE_TYPE (rhs
), val
,
2614 TREE_OPERAND (rhs
, 1),
2615 TREE_OPERAND (rhs
, 2));
2617 else if (TREE_CODE (rhs
) == MEM_REF
2618 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
2620 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
2621 if (TREE_CODE (val
) == ADDR_EXPR
2622 && is_gimple_min_invariant (val
))
2624 tree tem
= fold_build2 (MEM_REF
, TREE_TYPE (rhs
),
2626 TREE_OPERAND (rhs
, 1));
2631 return fold_const_aggregate_ref_1 (rhs
, valueize
);
2633 else if (kind
== tcc_declaration
)
2634 return get_symbol_constant_value (rhs
);
2638 case GIMPLE_UNARY_RHS
:
2640 /* Handle unary operators that can appear in GIMPLE form.
2641 Note that we know the single operand must be a constant,
2642 so this should almost always return a simplified RHS. */
2643 tree lhs
= gimple_assign_lhs (stmt
);
2644 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
2646 /* Conversions are useless for CCP purposes if they are
2647 value-preserving. Thus the restrictions that
2648 useless_type_conversion_p places for restrict qualification
2649 of pointer types should not apply here.
2650 Substitution later will only substitute to allowed places. */
2651 if (CONVERT_EXPR_CODE_P (subcode
)
2652 && POINTER_TYPE_P (TREE_TYPE (lhs
))
2653 && POINTER_TYPE_P (TREE_TYPE (op0
))
2654 && TYPE_ADDR_SPACE (TREE_TYPE (lhs
))
2655 == TYPE_ADDR_SPACE (TREE_TYPE (op0
))
2656 && TYPE_MODE (TREE_TYPE (lhs
))
2657 == TYPE_MODE (TREE_TYPE (op0
)))
2661 fold_unary_ignore_overflow_loc (loc
, subcode
,
2662 gimple_expr_type (stmt
), op0
);
2665 case GIMPLE_BINARY_RHS
:
2667 /* Handle binary operators that can appear in GIMPLE form. */
2668 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
2669 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
2671 /* Translate &x + CST into an invariant form suitable for
2672 further propagation. */
2673 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
2674 && TREE_CODE (op0
) == ADDR_EXPR
2675 && TREE_CODE (op1
) == INTEGER_CST
)
2677 tree off
= fold_convert (ptr_type_node
, op1
);
2678 return build_fold_addr_expr_loc
2680 fold_build2 (MEM_REF
,
2681 TREE_TYPE (TREE_TYPE (op0
)),
2682 unshare_expr (op0
), off
));
2685 return fold_binary_loc (loc
, subcode
,
2686 gimple_expr_type (stmt
), op0
, op1
);
2689 case GIMPLE_TERNARY_RHS
:
2691 /* Handle ternary operators that can appear in GIMPLE form. */
2692 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
2693 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
2694 tree op2
= (*valueize
) (gimple_assign_rhs3 (stmt
));
2696 /* Fold embedded expressions in ternary codes. */
2697 if ((subcode
== COND_EXPR
2698 || subcode
== VEC_COND_EXPR
)
2699 && COMPARISON_CLASS_P (op0
))
2701 tree op00
= (*valueize
) (TREE_OPERAND (op0
, 0));
2702 tree op01
= (*valueize
) (TREE_OPERAND (op0
, 1));
2703 tree tem
= fold_binary_loc (loc
, TREE_CODE (op0
),
2704 TREE_TYPE (op0
), op00
, op01
);
2709 return fold_ternary_loc (loc
, subcode
,
2710 gimple_expr_type (stmt
), op0
, op1
, op2
);
2722 if (gimple_call_internal_p (stmt
))
2724 enum tree_code subcode
= ERROR_MARK
;
2725 switch (gimple_call_internal_fn (stmt
))
2727 case IFN_UBSAN_CHECK_ADD
:
2728 subcode
= PLUS_EXPR
;
2730 case IFN_UBSAN_CHECK_SUB
:
2731 subcode
= MINUS_EXPR
;
2733 case IFN_UBSAN_CHECK_MUL
:
2734 subcode
= MULT_EXPR
;
2739 tree op0
= (*valueize
) (gimple_call_arg (stmt
, 0));
2740 tree op1
= (*valueize
) (gimple_call_arg (stmt
, 1));
2742 if (TREE_CODE (op0
) != INTEGER_CST
2743 || TREE_CODE (op1
) != INTEGER_CST
)
2745 tree res
= fold_binary_loc (loc
, subcode
,
2746 TREE_TYPE (gimple_call_arg (stmt
, 0)),
2749 && TREE_CODE (res
) == INTEGER_CST
2750 && !TREE_OVERFLOW (res
))
2755 fn
= (*valueize
) (gimple_call_fn (stmt
));
2756 if (TREE_CODE (fn
) == ADDR_EXPR
2757 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
2758 && DECL_BUILT_IN (TREE_OPERAND (fn
, 0))
2759 && gimple_builtin_call_types_compatible_p (stmt
,
2760 TREE_OPERAND (fn
, 0)))
2762 tree
*args
= XALLOCAVEC (tree
, gimple_call_num_args (stmt
));
2765 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
2766 args
[i
] = (*valueize
) (gimple_call_arg (stmt
, i
));
2767 call
= build_call_array_loc (loc
,
2768 gimple_call_return_type (stmt
),
2769 fn
, gimple_call_num_args (stmt
), args
);
2770 retval
= fold_call_expr (EXPR_LOCATION (call
), call
, false);
2773 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2774 STRIP_NOPS (retval
);
2775 retval
= fold_convert (gimple_call_return_type (stmt
), retval
);
2787 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2788 Returns NULL_TREE if folding to a constant is not possible, otherwise
2789 returns a constant according to is_gimple_min_invariant. */
2792 gimple_fold_stmt_to_constant (gimple stmt
, tree (*valueize
) (tree
))
2794 tree res
= gimple_fold_stmt_to_constant_1 (stmt
, valueize
);
2795 if (res
&& is_gimple_min_invariant (res
))
2801 /* The following set of functions are supposed to fold references using
2802 their constant initializers. */
2804 static tree
fold_ctor_reference (tree type
, tree ctor
,
2805 unsigned HOST_WIDE_INT offset
,
2806 unsigned HOST_WIDE_INT size
, tree
);
2808 /* See if we can find constructor defining value of BASE.
2809 When we know the consructor with constant offset (such as
2810 base is array[40] and we do know constructor of array), then
2811 BIT_OFFSET is adjusted accordingly.
2813 As a special case, return error_mark_node when constructor
2814 is not explicitly available, but it is known to be zero
2815 such as 'static const int a;'. */
2817 get_base_constructor (tree base
, HOST_WIDE_INT
*bit_offset
,
2818 tree (*valueize
)(tree
))
2820 HOST_WIDE_INT bit_offset2
, size
, max_size
;
2821 if (TREE_CODE (base
) == MEM_REF
)
2823 if (!integer_zerop (TREE_OPERAND (base
, 1)))
2825 if (!tree_fits_shwi_p (TREE_OPERAND (base
, 1)))
2827 *bit_offset
+= (mem_ref_offset (base
).low
2832 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
2833 base
= valueize (TREE_OPERAND (base
, 0));
2834 if (!base
|| TREE_CODE (base
) != ADDR_EXPR
)
2836 base
= TREE_OPERAND (base
, 0);
2839 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2840 DECL_INITIAL. If BASE is a nested reference into another
2841 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2842 the inner reference. */
2843 switch (TREE_CODE (base
))
2848 tree init
= ctor_for_folding (base
);
2850 /* Our semantic is exact opposite of ctor_for_folding;
2851 NULL means unknown, while error_mark_node is 0. */
2852 if (init
== error_mark_node
)
2855 return error_mark_node
;
2861 base
= get_ref_base_and_extent (base
, &bit_offset2
, &size
, &max_size
);
2862 if (max_size
== -1 || size
!= max_size
)
2864 *bit_offset
+= bit_offset2
;
2865 return get_base_constructor (base
, bit_offset
, valueize
);
2876 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2877 to the memory at bit OFFSET.
2879 We do only simple job of folding byte accesses. */
2882 fold_string_cst_ctor_reference (tree type
, tree ctor
,
2883 unsigned HOST_WIDE_INT offset
,
2884 unsigned HOST_WIDE_INT size
)
2886 if (INTEGRAL_TYPE_P (type
)
2887 && (TYPE_MODE (type
)
2888 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
))))
2889 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
))))
2891 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
)))) == 1
2892 && size
== BITS_PER_UNIT
2893 && !(offset
% BITS_PER_UNIT
))
2895 offset
/= BITS_PER_UNIT
;
2896 if (offset
< (unsigned HOST_WIDE_INT
) TREE_STRING_LENGTH (ctor
))
2897 return build_int_cst_type (type
, (TREE_STRING_POINTER (ctor
)
2900 const char a[20]="hello";
2903 might lead to offset greater than string length. In this case we
2904 know value is either initialized to 0 or out of bounds. Return 0
2906 return build_zero_cst (type
);
2911 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2912 SIZE to the memory at bit OFFSET. */
2915 fold_array_ctor_reference (tree type
, tree ctor
,
2916 unsigned HOST_WIDE_INT offset
,
2917 unsigned HOST_WIDE_INT size
,
2920 unsigned HOST_WIDE_INT cnt
;
2922 double_int low_bound
, elt_size
;
2923 double_int index
, max_index
;
2924 double_int access_index
;
2925 tree domain_type
= NULL_TREE
, index_type
= NULL_TREE
;
2926 HOST_WIDE_INT inner_offset
;
2928 /* Compute low bound and elt size. */
2929 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
2930 domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
2931 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
2933 /* Static constructors for variably sized objects makes no sense. */
2934 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) == INTEGER_CST
);
2935 index_type
= TREE_TYPE (TYPE_MIN_VALUE (domain_type
));
2936 low_bound
= tree_to_double_int (TYPE_MIN_VALUE (domain_type
));
2939 low_bound
= double_int_zero
;
2940 /* Static constructors for variably sized objects makes no sense. */
2941 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))))
2944 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))));
2947 /* We can handle only constantly sized accesses that are known to not
2948 be larger than size of array element. */
2949 if (!TYPE_SIZE_UNIT (type
)
2950 || TREE_CODE (TYPE_SIZE_UNIT (type
)) != INTEGER_CST
2951 || elt_size
.slt (tree_to_double_int (TYPE_SIZE_UNIT (type
)))
2952 || elt_size
.is_zero ())
2955 /* Compute the array index we look for. */
2956 access_index
= double_int::from_uhwi (offset
/ BITS_PER_UNIT
)
2957 .udiv (elt_size
, TRUNC_DIV_EXPR
);
2958 access_index
+= low_bound
;
2960 access_index
= access_index
.ext (TYPE_PRECISION (index_type
),
2961 TYPE_UNSIGNED (index_type
));
2963 /* And offset within the access. */
2964 inner_offset
= offset
% (elt_size
.to_uhwi () * BITS_PER_UNIT
);
2966 /* See if the array field is large enough to span whole access. We do not
2967 care to fold accesses spanning multiple array indexes. */
2968 if (inner_offset
+ size
> elt_size
.to_uhwi () * BITS_PER_UNIT
)
2971 index
= low_bound
- double_int_one
;
2973 index
= index
.ext (TYPE_PRECISION (index_type
), TYPE_UNSIGNED (index_type
));
2975 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
, cval
)
2977 /* Array constructor might explicitely set index, or specify range
2978 or leave index NULL meaning that it is next index after previous
2982 if (TREE_CODE (cfield
) == INTEGER_CST
)
2983 max_index
= index
= tree_to_double_int (cfield
);
2986 gcc_assert (TREE_CODE (cfield
) == RANGE_EXPR
);
2987 index
= tree_to_double_int (TREE_OPERAND (cfield
, 0));
2988 max_index
= tree_to_double_int (TREE_OPERAND (cfield
, 1));
2993 index
+= double_int_one
;
2995 index
= index
.ext (TYPE_PRECISION (index_type
),
2996 TYPE_UNSIGNED (index_type
));
3000 /* Do we have match? */
3001 if (access_index
.cmp (index
, 1) >= 0
3002 && access_index
.cmp (max_index
, 1) <= 0)
3003 return fold_ctor_reference (type
, cval
, inner_offset
, size
,
3006 /* When memory is not explicitely mentioned in constructor,
3007 it is 0 (or out of range). */
3008 return build_zero_cst (type
);
3011 /* CTOR is CONSTRUCTOR of an aggregate or vector.
3012 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
3015 fold_nonarray_ctor_reference (tree type
, tree ctor
,
3016 unsigned HOST_WIDE_INT offset
,
3017 unsigned HOST_WIDE_INT size
,
3020 unsigned HOST_WIDE_INT cnt
;
3023 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
,
3026 tree byte_offset
= DECL_FIELD_OFFSET (cfield
);
3027 tree field_offset
= DECL_FIELD_BIT_OFFSET (cfield
);
3028 tree field_size
= DECL_SIZE (cfield
);
3029 double_int bitoffset
;
3030 double_int byte_offset_cst
= tree_to_double_int (byte_offset
);
3031 double_int bits_per_unit_cst
= double_int::from_uhwi (BITS_PER_UNIT
);
3032 double_int bitoffset_end
, access_end
;
3034 /* Variable sized objects in static constructors makes no sense,
3035 but field_size can be NULL for flexible array members. */
3036 gcc_assert (TREE_CODE (field_offset
) == INTEGER_CST
3037 && TREE_CODE (byte_offset
) == INTEGER_CST
3038 && (field_size
!= NULL_TREE
3039 ? TREE_CODE (field_size
) == INTEGER_CST
3040 : TREE_CODE (TREE_TYPE (cfield
)) == ARRAY_TYPE
));
3042 /* Compute bit offset of the field. */
3043 bitoffset
= tree_to_double_int (field_offset
)
3044 + byte_offset_cst
* bits_per_unit_cst
;
3045 /* Compute bit offset where the field ends. */
3046 if (field_size
!= NULL_TREE
)
3047 bitoffset_end
= bitoffset
+ tree_to_double_int (field_size
);
3049 bitoffset_end
= double_int_zero
;
3051 access_end
= double_int::from_uhwi (offset
)
3052 + double_int::from_uhwi (size
);
3054 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
3055 [BITOFFSET, BITOFFSET_END)? */
3056 if (access_end
.cmp (bitoffset
, 0) > 0
3057 && (field_size
== NULL_TREE
3058 || double_int::from_uhwi (offset
).slt (bitoffset_end
)))
3060 double_int inner_offset
= double_int::from_uhwi (offset
) - bitoffset
;
3061 /* We do have overlap. Now see if field is large enough to
3062 cover the access. Give up for accesses spanning multiple
3064 if (access_end
.cmp (bitoffset_end
, 0) > 0)
3066 if (double_int::from_uhwi (offset
).slt (bitoffset
))
3068 return fold_ctor_reference (type
, cval
,
3069 inner_offset
.to_uhwi (), size
,
3073 /* When memory is not explicitely mentioned in constructor, it is 0. */
3074 return build_zero_cst (type
);
3077 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
3078 to the memory at bit OFFSET. */
3081 fold_ctor_reference (tree type
, tree ctor
, unsigned HOST_WIDE_INT offset
,
3082 unsigned HOST_WIDE_INT size
, tree from_decl
)
3086 /* We found the field with exact match. */
3087 if (useless_type_conversion_p (type
, TREE_TYPE (ctor
))
3089 return canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
3091 /* We are at the end of walk, see if we can view convert the
3093 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor
)) && !offset
3094 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3095 && operand_equal_p (TYPE_SIZE (type
),
3096 TYPE_SIZE (TREE_TYPE (ctor
)), 0))
3098 ret
= canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
3099 ret
= fold_unary (VIEW_CONVERT_EXPR
, type
, ret
);
3104 if (TREE_CODE (ctor
) == STRING_CST
)
3105 return fold_string_cst_ctor_reference (type
, ctor
, offset
, size
);
3106 if (TREE_CODE (ctor
) == CONSTRUCTOR
)
3109 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
3110 || TREE_CODE (TREE_TYPE (ctor
)) == VECTOR_TYPE
)
3111 return fold_array_ctor_reference (type
, ctor
, offset
, size
,
3114 return fold_nonarray_ctor_reference (type
, ctor
, offset
, size
,
3121 /* Return the tree representing the element referenced by T if T is an
3122 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
3123 names using VALUEIZE. Return NULL_TREE otherwise. */
3126 fold_const_aggregate_ref_1 (tree t
, tree (*valueize
) (tree
))
3128 tree ctor
, idx
, base
;
3129 HOST_WIDE_INT offset
, size
, max_size
;
3132 if (TREE_THIS_VOLATILE (t
))
3135 if (TREE_CODE_CLASS (TREE_CODE (t
)) == tcc_declaration
)
3136 return get_symbol_constant_value (t
);
3138 tem
= fold_read_from_constant_string (t
);
3142 switch (TREE_CODE (t
))
3145 case ARRAY_RANGE_REF
:
3146 /* Constant indexes are handled well by get_base_constructor.
3147 Only special case variable offsets.
3148 FIXME: This code can't handle nested references with variable indexes
3149 (they will be handled only by iteration of ccp). Perhaps we can bring
3150 get_ref_base_and_extent here and make it use a valueize callback. */
3151 if (TREE_CODE (TREE_OPERAND (t
, 1)) == SSA_NAME
3153 && (idx
= (*valueize
) (TREE_OPERAND (t
, 1)))
3154 && TREE_CODE (idx
) == INTEGER_CST
)
3156 tree low_bound
, unit_size
;
3159 /* If the resulting bit-offset is constant, track it. */
3160 if ((low_bound
= array_ref_low_bound (t
),
3161 TREE_CODE (low_bound
) == INTEGER_CST
)
3162 && (unit_size
= array_ref_element_size (t
),
3163 tree_fits_uhwi_p (unit_size
))
3164 && (doffset
= (TREE_INT_CST (idx
) - TREE_INT_CST (low_bound
))
3165 .sext (TYPE_PRECISION (TREE_TYPE (idx
))),
3166 doffset
.fits_shwi ()))
3168 offset
= doffset
.to_shwi ();
3169 offset
*= tree_to_uhwi (unit_size
);
3170 offset
*= BITS_PER_UNIT
;
3172 base
= TREE_OPERAND (t
, 0);
3173 ctor
= get_base_constructor (base
, &offset
, valueize
);
3174 /* Empty constructor. Always fold to 0. */
3175 if (ctor
== error_mark_node
)
3176 return build_zero_cst (TREE_TYPE (t
));
3177 /* Out of bound array access. Value is undefined,
3181 /* We can not determine ctor. */
3184 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
,
3185 tree_to_uhwi (unit_size
)
3194 case TARGET_MEM_REF
:
3196 base
= get_ref_base_and_extent (t
, &offset
, &size
, &max_size
);
3197 ctor
= get_base_constructor (base
, &offset
, valueize
);
3199 /* Empty constructor. Always fold to 0. */
3200 if (ctor
== error_mark_node
)
3201 return build_zero_cst (TREE_TYPE (t
));
3202 /* We do not know precise address. */
3203 if (max_size
== -1 || max_size
!= size
)
3205 /* We can not determine ctor. */
3209 /* Out of bound array access. Value is undefined, but don't fold. */
3213 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
, size
,
3219 tree c
= fold_const_aggregate_ref_1 (TREE_OPERAND (t
, 0), valueize
);
3220 if (c
&& TREE_CODE (c
) == COMPLEX_CST
)
3221 return fold_build1_loc (EXPR_LOCATION (t
),
3222 TREE_CODE (t
), TREE_TYPE (t
), c
);
3234 fold_const_aggregate_ref (tree t
)
3236 return fold_const_aggregate_ref_1 (t
, NULL
);
3239 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3240 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3241 KNOWN_BINFO carries the binfo describing the true type of
3242 OBJ_TYPE_REF_OBJECT(REF). */
3245 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token
, tree known_binfo
)
3247 unsigned HOST_WIDE_INT offset
, size
;
3248 tree v
, fn
, vtable
, init
;
3250 vtable
= v
= BINFO_VTABLE (known_binfo
);
3251 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
3255 if (TREE_CODE (v
) == POINTER_PLUS_EXPR
)
3257 offset
= tree_to_uhwi (TREE_OPERAND (v
, 1)) * BITS_PER_UNIT
;
3258 v
= TREE_OPERAND (v
, 0);
3263 if (TREE_CODE (v
) != ADDR_EXPR
)
3265 v
= TREE_OPERAND (v
, 0);
3267 if (TREE_CODE (v
) != VAR_DECL
3268 || !DECL_VIRTUAL_P (v
))
3270 init
= ctor_for_folding (v
);
3272 /* The virtual tables should always be born with constructors.
3273 and we always should assume that they are avaialble for
3274 folding. At the moment we do not stream them in all cases,
3275 but it should never happen that ctor seem unreachable. */
3277 if (init
== error_mark_node
)
3279 gcc_assert (in_lto_p
);
3282 gcc_checking_assert (TREE_CODE (TREE_TYPE (v
)) == ARRAY_TYPE
);
3283 size
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v
))));
3284 offset
+= token
* size
;
3285 fn
= fold_ctor_reference (TREE_TYPE (TREE_TYPE (v
)), init
,
3287 if (!fn
|| integer_zerop (fn
))
3289 gcc_assert (TREE_CODE (fn
) == ADDR_EXPR
3290 || TREE_CODE (fn
) == FDESC_EXPR
);
3291 fn
= TREE_OPERAND (fn
, 0);
3292 gcc_assert (TREE_CODE (fn
) == FUNCTION_DECL
);
3294 /* When cgraph node is missing and function is not public, we cannot
3295 devirtualize. This can happen in WHOPR when the actual method
3296 ends up in other partition, because we found devirtualization
3297 possibility too late. */
3298 if (!can_refer_decl_in_current_unit_p (fn
, vtable
))
3301 /* Make sure we create a cgraph node for functions we'll reference.
3302 They can be non-existent if the reference comes from an entry
3303 of an external vtable for example. */
3304 cgraph_get_create_node (fn
);
3309 /* Return true iff VAL is a gimple expression that is known to be
3310 non-negative. Restricted to floating-point inputs. */
3313 gimple_val_nonnegative_real_p (tree val
)
3317 gcc_assert (val
&& SCALAR_FLOAT_TYPE_P (TREE_TYPE (val
)));
3319 /* Use existing logic for non-gimple trees. */
3320 if (tree_expr_nonnegative_p (val
))
3323 if (TREE_CODE (val
) != SSA_NAME
)
3326 /* Currently we look only at the immediately defining statement
3327 to make this determination, since recursion on defining
3328 statements of operands can lead to quadratic behavior in the
3329 worst case. This is expected to catch almost all occurrences
3330 in practice. It would be possible to implement limited-depth
3331 recursion if important cases are lost. Alternatively, passes
3332 that need this information (such as the pow/powi lowering code
3333 in the cse_sincos pass) could be revised to provide it through
3334 dataflow propagation. */
3336 def_stmt
= SSA_NAME_DEF_STMT (val
);
3338 if (is_gimple_assign (def_stmt
))
3342 /* See fold-const.c:tree_expr_nonnegative_p for additional
3343 cases that could be handled with recursion. */
3345 switch (gimple_assign_rhs_code (def_stmt
))
3348 /* Always true for floating-point operands. */
3352 /* True if the two operands are identical (since we are
3353 restricted to floating-point inputs). */
3354 op0
= gimple_assign_rhs1 (def_stmt
);
3355 op1
= gimple_assign_rhs2 (def_stmt
);
3358 || operand_equal_p (op0
, op1
, 0))
3365 else if (is_gimple_call (def_stmt
))
3367 tree fndecl
= gimple_call_fndecl (def_stmt
);
3369 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3373 switch (DECL_FUNCTION_CODE (fndecl
))
3375 CASE_FLT_FN (BUILT_IN_ACOS
):
3376 CASE_FLT_FN (BUILT_IN_ACOSH
):
3377 CASE_FLT_FN (BUILT_IN_CABS
):
3378 CASE_FLT_FN (BUILT_IN_COSH
):
3379 CASE_FLT_FN (BUILT_IN_ERFC
):
3380 CASE_FLT_FN (BUILT_IN_EXP
):
3381 CASE_FLT_FN (BUILT_IN_EXP10
):
3382 CASE_FLT_FN (BUILT_IN_EXP2
):
3383 CASE_FLT_FN (BUILT_IN_FABS
):
3384 CASE_FLT_FN (BUILT_IN_FDIM
):
3385 CASE_FLT_FN (BUILT_IN_HYPOT
):
3386 CASE_FLT_FN (BUILT_IN_POW10
):
3389 CASE_FLT_FN (BUILT_IN_SQRT
):
3390 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3391 nonnegative inputs. */
3392 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val
))))
3397 CASE_FLT_FN (BUILT_IN_POWI
):
3398 /* True if the second argument is an even integer. */
3399 arg1
= gimple_call_arg (def_stmt
, 1);
3401 if (TREE_CODE (arg1
) == INTEGER_CST
3402 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
3407 CASE_FLT_FN (BUILT_IN_POW
):
3408 /* True if the second argument is an even integer-valued
3410 arg1
= gimple_call_arg (def_stmt
, 1);
3412 if (TREE_CODE (arg1
) == REAL_CST
)
3417 c
= TREE_REAL_CST (arg1
);
3418 n
= real_to_integer (&c
);
3422 REAL_VALUE_TYPE cint
;
3423 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
3424 if (real_identical (&c
, &cint
))
3440 /* Given a pointer value OP0, return a simplified version of an
3441 indirection through OP0, or NULL_TREE if no simplification is
3442 possible. Note that the resulting type may be different from
3443 the type pointed to in the sense that it is still compatible
3444 from the langhooks point of view. */
3447 gimple_fold_indirect_ref (tree t
)
3449 tree ptype
= TREE_TYPE (t
), type
= TREE_TYPE (ptype
);
3454 subtype
= TREE_TYPE (sub
);
3455 if (!POINTER_TYPE_P (subtype
))
3458 if (TREE_CODE (sub
) == ADDR_EXPR
)
3460 tree op
= TREE_OPERAND (sub
, 0);
3461 tree optype
= TREE_TYPE (op
);
3463 if (useless_type_conversion_p (type
, optype
))
3466 /* *(foo *)&fooarray => fooarray[0] */
3467 if (TREE_CODE (optype
) == ARRAY_TYPE
3468 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype
))) == INTEGER_CST
3469 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
3471 tree type_domain
= TYPE_DOMAIN (optype
);
3472 tree min_val
= size_zero_node
;
3473 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
3474 min_val
= TYPE_MIN_VALUE (type_domain
);
3475 if (TREE_CODE (min_val
) == INTEGER_CST
)
3476 return build4 (ARRAY_REF
, type
, op
, min_val
, NULL_TREE
, NULL_TREE
);
3478 /* *(foo *)&complexfoo => __real__ complexfoo */
3479 else if (TREE_CODE (optype
) == COMPLEX_TYPE
3480 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
3481 return fold_build1 (REALPART_EXPR
, type
, op
);
3482 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
3483 else if (TREE_CODE (optype
) == VECTOR_TYPE
3484 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
3486 tree part_width
= TYPE_SIZE (type
);
3487 tree index
= bitsize_int (0);
3488 return fold_build3 (BIT_FIELD_REF
, type
, op
, part_width
, index
);
3492 /* *(p + CST) -> ... */
3493 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
3494 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
3496 tree addr
= TREE_OPERAND (sub
, 0);
3497 tree off
= TREE_OPERAND (sub
, 1);
3501 addrtype
= TREE_TYPE (addr
);
3503 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
3504 if (TREE_CODE (addr
) == ADDR_EXPR
3505 && TREE_CODE (TREE_TYPE (addrtype
)) == VECTOR_TYPE
3506 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
)))
3507 && tree_fits_uhwi_p (off
))
3509 unsigned HOST_WIDE_INT offset
= tree_to_uhwi (off
);
3510 tree part_width
= TYPE_SIZE (type
);
3511 unsigned HOST_WIDE_INT part_widthi
3512 = tree_to_shwi (part_width
) / BITS_PER_UNIT
;
3513 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
3514 tree index
= bitsize_int (indexi
);
3515 if (offset
/ part_widthi
3516 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype
)))
3517 return fold_build3 (BIT_FIELD_REF
, type
, TREE_OPERAND (addr
, 0),
3521 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
3522 if (TREE_CODE (addr
) == ADDR_EXPR
3523 && TREE_CODE (TREE_TYPE (addrtype
)) == COMPLEX_TYPE
3524 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
))))
3526 tree size
= TYPE_SIZE_UNIT (type
);
3527 if (tree_int_cst_equal (size
, off
))
3528 return fold_build1 (IMAGPART_EXPR
, type
, TREE_OPERAND (addr
, 0));
3531 /* *(p + CST) -> MEM_REF <p, CST>. */
3532 if (TREE_CODE (addr
) != ADDR_EXPR
3533 || DECL_P (TREE_OPERAND (addr
, 0)))
3534 return fold_build2 (MEM_REF
, type
,
3536 build_int_cst_wide (ptype
,
3537 TREE_INT_CST_LOW (off
),
3538 TREE_INT_CST_HIGH (off
)));
3541 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3542 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
3543 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype
)))) == INTEGER_CST
3544 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (subtype
))))
3547 tree min_val
= size_zero_node
;
3549 sub
= gimple_fold_indirect_ref (sub
);
3551 sub
= build1 (INDIRECT_REF
, TREE_TYPE (subtype
), osub
);
3552 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
3553 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
3554 min_val
= TYPE_MIN_VALUE (type_domain
);
3555 if (TREE_CODE (min_val
) == INTEGER_CST
)
3556 return build4 (ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
, NULL_TREE
);
3562 /* Return true if CODE is an operation that when operating on signed
3563 integer types involves undefined behavior on overflow and the
3564 operation can be expressed with unsigned arithmetic. */
3567 arith_code_with_undefined_signed_overflow (tree_code code
)
3575 case POINTER_PLUS_EXPR
:
3582 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
3583 operation that can be transformed to unsigned arithmetic by converting
3584 its operand, carrying out the operation in the corresponding unsigned
3585 type and converting the result back to the original type.
3587 Returns a sequence of statements that replace STMT and also contain
3588 a modified form of STMT itself. */
3591 rewrite_to_defined_overflow (gimple stmt
)
3593 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3595 fprintf (dump_file
, "rewriting stmt with undefined signed "
3597 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
3600 tree lhs
= gimple_assign_lhs (stmt
);
3601 tree type
= unsigned_type_for (TREE_TYPE (lhs
));
3602 gimple_seq stmts
= NULL
;
3603 for (unsigned i
= 1; i
< gimple_num_ops (stmt
); ++i
)
3605 gimple_seq stmts2
= NULL
;
3606 gimple_set_op (stmt
, i
,
3607 force_gimple_operand (fold_convert (type
,
3608 gimple_op (stmt
, i
)),
3609 &stmts2
, true, NULL_TREE
));
3610 gimple_seq_add_seq (&stmts
, stmts2
);
3612 gimple_assign_set_lhs (stmt
, make_ssa_name (type
, stmt
));
3613 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
3614 gimple_assign_set_rhs_code (stmt
, PLUS_EXPR
);
3615 gimple_seq_add_stmt (&stmts
, stmt
);
3616 gimple cvt
= gimple_build_assign_with_ops
3617 (NOP_EXPR
, lhs
, gimple_assign_lhs (stmt
), NULL_TREE
);
3618 gimple_seq_add_stmt (&stmts
, cvt
);