/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2014 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to another compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
	In this case we have a static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	defined.
     3) COMDAT functions referred to by external vtables that
	we devirtualize only during the final compilation stage.
	At this time we have already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */
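/* Illustration of case 3 (a sketch, not from the original sources):

     struct A { virtual void f (); };   // A::f's body is COMDAT

   When this unit folds a load from A's external vtable it may discover
   &A::f, but if we already decided not to emit the COMDAT body here, the
   symbol cannot be referenced directly and folding must give up.  */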
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT (decl))
    return false;
  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;
  /* Static objects can be referred to only if they were not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      snode = symtab_get_node (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node> (snode);
      return !node || !node->global.inlined_to;
    }
  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var in another LTO partition.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || !DECL_EXTERNAL (from_decl)
      || (flag_ltrans
	  && symtab_get_node (from_decl)->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may refer
     to a symbol keyed to another compilation unit.  The other compilation
     unit may be in a separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_get_node (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at the ltrans stage, so don't worry about WHOPR.
     Also, while still gimplifying, all referred COMDAT functions will be
     produced.

     As observed in PR20991 for already optimized out COMDAT virtual functions
     it may be tempting to not give up, because the copy will be output
     elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used, and when the other unit was compiled with
     LTO it is possible that the vtable was kept public while the function
     itself was privatized.  */
  if (!flag_ltrans
      && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;
  /* OK, we are seeing either a COMDAT or a static variable.  In this case we
     must check that the definition is still around so we can refer to it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have the function body and that we haven't yet
	 decided to eliminate the offline copy of the function.
	 The second point is important when devirtualization happens during
	 the final compilation stage, when making a new reference no longer
	 causes the callee to be compiled.  */
      if (!node || !node->definition || node->global.inlined_to)
	{
	  gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
	  return false;
	}
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->definition)
	{
	  gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
	  return false;
	}
    }
  return true;
}
/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform it
   into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor contains
   CVAL.  */
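/* Illustration (a sketch, not from the original sources): an initializer
   entry of the form

     &a p+ 4	(POINTER_PLUS_EXPR)

   is rewritten below to the invariant address  &MEM[(void *)&a + 4B],
   which is_gimple_min_invariant accepts.  */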
tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_get_create_node (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
/* If SYM is a constant variable with a known value, return that value.
   NULL_TREE is returned otherwise.  */
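/* Example (a sketch, not from the original sources):

     static const int n = 42;

   get_symbol_constant_value (n) returns the INTEGER_CST 42; a 'const'
   scalar with no initializer that may not be overridden at link or run
   time yields a zero constant instead.  */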
tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */
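/* Illustration (a sketch, not from the original sources): after constant
   address propagation we may see

     tmp = MEM[&a + 0];

   which the code below folds back to the plain reference  tmp = a,  and
   e.g. REALPART_EXPR of a complex constant folds to a constant.  */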
static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}
/* Attempt to fold an assignment statement pointed to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */
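/* Example (a sketch, not from the original sources): for a statement

     x = *&y;

   the GIMPLE_SINGLE_RHS path below returns the folded rhs  y,  and for a
   boolean-typed c_1 the GIMPLE_BINARY_RHS path canonicalizes

     b = (c_1 == 0);   into   b = c_1 ^ 1;   (or  ~c_1  for 1-bit types)  */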
static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (val))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (val, &final);
		if (final && targets.length () <= 1)
		  {
		    tree fndecl;
		    if (targets.length () == 1)
		      fndecl = targets[0]->decl;
		    else
		      fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		    val = fold_convert (TREE_TYPE (val), fndecl);
		    STRIP_USELESS_TYPE_CONVERSION (val);
		    return val;
		  }
	      }
	  }
	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;
    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
      }
      break;
    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that the second operand is an integer constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X,
		 X == 1 and X != 0 is X.  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));
	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;
    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement; however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */
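/* Example (a sketch, not from the original sources): the condition of

     if (4 > 5) ...

   folds to a constant, and the statement is rewritten as  if (0 != 0).  */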
static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */
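/* Illustration (a sketch, not from the original sources): replacing

     memcpy (&a, &b, 4);		# .MEM_3 = VDEF <.MEM_2>

   with a sequence of two stores gimplified from EXPR keeps .MEM_2 as the
   first VUSE and .MEM_3 as the last VDEF, with a fresh SSA name linking
   the stores in between.  */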
void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;
  gimple laststore;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  laststore = NULL;
  for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
    {
      new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  reaching_vuse = gimple_vuse (stmt);
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
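/* Example (a sketch, not from the original sources): for

     p_1 = PHI <"ab", "wxyz">

   TYPE == 0 fails (the string lengths 2 and 4 differ), while TYPE == 1
   succeeds and returns the maximum string length 4.  */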
static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_maxval_strlen (rhs, length, visited, type);
	  }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    tree op2 = gimple_assign_rhs2 (def_stmt);
	    tree op3 = gimple_assign_rhs3 (def_stmt);
	    return get_maxval_strlen (op2, length, visited, type)
		   && get_maxval_strlen (op3, length, visited, type);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }
	}
	return true;

      default:
	return false;
    }
}
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */
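/* Example (a sketch, not from the original sources): with the source
   length known, the call

     strcpy (dst, "abc");

   can come back from fold_builtin_strcpy as  memcpy (dst, "abc", 4).  */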
tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
    case BUILT_IN_STRCAT:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	    fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_STRCAT:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcat (loc, gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2], ignore,
					   DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */
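/* Illustration (a sketch, not from the original sources): a virtual call

     OBJ_TYPE_REF (_3; b_1->0) (b_1);

   with a single possible target A::f becomes the direct call  A::f (b_1);
   with no possible targets it is replaced by __builtin_unreachable ().  */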
static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, cgraph_get_node (gimple_call_addr_fndecl
						(OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, &final);
	  if (final && targets.length () <= 1)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (targets.length () == 1)
		{
		  gimple_call_set_fndecl (stmt, targets[0]->decl);
		  changed = true;
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		}
	      else
		{
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
		      tree def = get_or_create_ssa_default_def (cfun, var);
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      update_call_from_tree (gsi, def);
		    }
		  else
		    gsi_replace (gsi, new_stmt, true);
		  return true;
		}
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt))
    {
      tree result = gimple_fold_builtin (stmt);
      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    changed |= targetm.gimple_fold_builtin (gsi);

  return changed;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */
static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }

	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */
bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */
bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */
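/* Examples (a sketch, not from the original sources):

     canonicalize_bool (a_1 < b_2, true)   yields  a_1 >= b_2
     canonicalize_bool (x_3, false)        yields  x_3 != 0

   (for a non-boolean-typed x_3); both results have boolean type.  */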
static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;

  if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */
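/* Example (a sketch, not from the original sources): with

     t_1 = a_2 < b_3;

   same_bool_comparison_p (t_1, LT_EXPR, a_2, b_3) is true, since t_1's
   defining statement matches the comparison.  */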
static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the AND.
   Return NULL_EXPR if we can't simplify this to a single expression.  */
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */
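/* Examples (a sketch, not from the original sources), for integer x_1:

     (x_1 > 5) AND (x_1 > 3)    simplifies to  x_1 > 5   (more restrictive)
     (x_1 < 3) AND (x_1 > 5)    simplifies to  false     (disjoint ranges)
     (x_1 <= 4) AND (x_1 >= 4)  simplifies to  x_1 == 4  (singleton range)  */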
static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */
tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
/* Helper function for or_comparisons_1: try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_EXPR if we can't simplify this to a single expression.  */
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */
2017 or_var_with_comparison_1 (gimple stmt
,
2018 enum tree_code code2
, tree op2a
, tree op2b
)
2020 tree var
= gimple_assign_lhs (stmt
);
2021 tree true_test_var
= NULL_TREE
;
2022 tree false_test_var
= NULL_TREE
;
2023 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
2025 /* Check for identities like (var OR (var != 0)) => true . */
2026 if (TREE_CODE (op2a
) == SSA_NAME
2027 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
2029 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
2030 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
2032 true_test_var
= op2a
;
2033 if (var
== true_test_var
)
2036 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
2037 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
2039 false_test_var
= op2a
;
2040 if (var
== false_test_var
)
2041 return boolean_true_node
;
2045 /* If the definition is a comparison, recurse on it. */
2046 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
2048 tree t
= or_comparisons_1 (innercode
,
2049 gimple_assign_rhs1 (stmt
),
2050 gimple_assign_rhs2 (stmt
),
2058 /* If the definition is an AND or OR expression, we may be able to
2059 simplify by reassociating. */
2060 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
2061 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
2063 tree inner1
= gimple_assign_rhs1 (stmt
);
2064 tree inner2
= gimple_assign_rhs2 (stmt
);
2067 tree partial
= NULL_TREE
;
2068 bool is_or
= (innercode
== BIT_IOR_EXPR
);
2070 /* Check for boolean identities that don't require recursive examination
2072 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2073 inner1 OR (inner1 AND inner2) => inner1
2074 !inner1 OR (inner1 OR inner2) => true
2075 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2077 if (inner1
== true_test_var
)
2078 return (is_or
? var
: inner1
);
2079 else if (inner2
== true_test_var
)
2080 return (is_or
? var
: inner2
);
2081 else if (inner1
== false_test_var
)
2084 : or_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
2085 else if (inner2
== false_test_var
)
2088 : or_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
2090 /* Next, redistribute/reassociate the OR across the inner tests.
2091 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2092 if (TREE_CODE (inner1
) == SSA_NAME
2093 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
2094 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2095 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
2096 gimple_assign_rhs1 (s
),
2097 gimple_assign_rhs2 (s
),
2098 code2
, op2a
, op2b
)))
2100 /* Handle the OR case, where we are reassociating:
2101 (inner1 OR inner2) OR (op2a code2 op2b)
2103 If the partial result t is a constant, we win. Otherwise
2104 continue on to try reassociating with the other inner test. */
2107 if (integer_onep (t
))
2108 return boolean_true_node
;
2109 else if (integer_zerop (t
))
2113 /* Handle the AND case, where we are redistributing:
2114 (inner1 AND inner2) OR (op2a code2 op2b)
2115 => (t AND (inner2 OR (op2a code op2b))) */
2116 else if (integer_zerop (t
))
2117 return boolean_false_node
;
2119 /* Save partial result for later. */
2123 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2124 if (TREE_CODE (inner2
) == SSA_NAME
2125 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
2126 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2127 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
2128 gimple_assign_rhs1 (s
),
2129 gimple_assign_rhs2 (s
),
2130 code2
, op2a
, op2b
)))
2132 /* Handle the OR case, where we are reassociating:
2133 (inner1 OR inner2) OR (op2a code2 op2b)
2135 => (t OR partial) */
2138 if (integer_zerop (t
))
2140 else if (integer_onep (t
))
2141 return boolean_true_node
;
2142 /* If both are the same, we can apply the identity
2144 else if (partial
&& same_bool_result_p (t
, partial
))
2148 /* Handle the AND case, where we are redistributing:
2149 (inner1 AND inner2) OR (op2a code2 op2b)
2150 => (t AND (inner1 OR (op2a code2 op2b)))
2151 => (t AND partial) */
2154 if (integer_zerop (t
))
2155 return boolean_false_node
;
2158 /* We already got a simplification for the other
2159 operand to the redistributed AND expression. The
2160 interesting case is when at least one is true.
2161 Or, if both are the same, we can apply the identity
2163 if (integer_onep (partial
))
2165 else if (integer_onep (t
))
2167 else if (same_bool_result_p (t
, partial
))
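
/* An illustrative example (editorial sketch, hypothetical SSA names): if

     a_1 = x_4 < 8;
     b_2 = x_4 > 3;
     var_3 = a_1 | b_2;

   then simplifying var_3 OR (x_4 >= 0) redistributes the OR across the
   inner tests; the first partial result (a_1 OR (x_4 >= 0)) already
   covers the whole range of a signed x_4 and folds to boolean_true_node,
   so the entire expression does too.  */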
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1, code2,
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1,
                                    swap_tree_comparison (code2),
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
         return that or TRUE, depending on whether the constant op1b
         also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code1, boolean_type_node, op1a, op1b);
            }
        }
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
            }
        }

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      else if (code2 == EQ_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Check for singleton ranges.  */
      else if (cmp == 0
               && ((code1 == LT_EXPR && code2 == GT_EXPR)
                   || (code1 == GT_EXPR && code2 == LT_EXPR)))
        return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_true_node;
      else if (cmp <= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ORed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     return that.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
                        {
                          if (!result)
                            result = boolean_true_node;
                          else if (!integer_onep (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME
                           && !SSA_NAME_IS_DEFAULT_DEF (arg))
                    {
                      tree temp;
                      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
                      /* In simple cases we can look through PHI nodes,
                         but we have to be careful with loops.
                         See PR49073.  */
                      if (! dom_info_available_p (CDI_DOMINATORS)
                          || gimple_bb (def_stmt) == gimple_bb (stmt)
                          || dominated_by_p (CDI_DOMINATORS,
                                             gimple_bb (def_stmt),
                                             gimple_bb (stmt)))
                        return NULL_TREE;
                      temp = or_var_with_comparison (arg, invert, code2,
                                                     op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }
          break;

        default:
          break;
        }
    }
  return NULL_TREE;
}
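
/* Illustrative folds performed above (editorial sketch, assuming an
   integral x_1):

     (x_1 < 3) || (x_1 > 3)   =>  x_1 != 3   (singleton range)
     (x_1 < 3) || (x_1 < 7)   =>  x_1 < 7    (less restrictive bound)
     (x_1 != 5) || (x_1 < 9)  =>  true       (5 also satisfies x_1 < 9)  */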
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
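
/* Editorial note: because or_comparisons_1 only follows the SSA
   definition feeding its first comparison, trying both argument orders
   matters.  A fold such as (y_2 > 0) || (x_1 != 0) (hypothetical SSA
   names), where only x_1's definition is a boolean expression worth
   expanding, is found on the swapped attempt.  */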
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant
                     value, if any.  */
                  return (*valueize) (rhs);
                }
              /* Handle propagating invariant addresses into address
                 operations.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR
                       && !is_gimple_min_invariant (rhs))
                {
                  HOST_WIDE_INT offset = 0;
                  tree base;
                  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
                                                          &offset,
                                                          valueize);
                  if (base
                      && (CONSTANT_CLASS_P (base)
                          || decl_address_invariant_p (base)))
                    return build_invariant_address (TREE_TYPE (rhs),
                                                    base, offset);
                }
              else if (TREE_CODE (rhs) == CONSTRUCTOR
                       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                       && (CONSTRUCTOR_NELTS (rhs)
                           == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
                {
                  unsigned i;
                  tree val, *vec;

                  vec = XALLOCAVEC (tree,
                                    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
                    {
                      val = (*valueize) (val);
                      if (TREE_CODE (val) == INTEGER_CST
                          || TREE_CODE (val) == REAL_CST
                          || TREE_CODE (val) == FIXED_CST)
                        vec[i] = val;
                      else
                        return NULL_TREE;
                    }

                  return build_vector (TREE_TYPE (rhs), vec);
                }
              if (subcode == OBJ_TYPE_REF)
                {
                  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
                  /* If callee is constant, we can fold away the wrapper.  */
                  if (is_gimple_min_invariant (val))
                    return val;
                }

              if (kind == tcc_reference)
                {
                  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                       || TREE_CODE (rhs) == REALPART_EXPR
                       || TREE_CODE (rhs) == IMAGPART_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_unary_loc (EXPR_LOCATION (rhs),
                                             TREE_CODE (rhs),
                                             TREE_TYPE (rhs), val);
                    }
                  else if (TREE_CODE (rhs) == BIT_FIELD_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_ternary_loc (EXPR_LOCATION (rhs),
                                               TREE_CODE (rhs),
                                               TREE_TYPE (rhs), val,
                                               TREE_OPERAND (rhs, 1),
                                               TREE_OPERAND (rhs, 2));
                    }
                  else if (TREE_CODE (rhs) == MEM_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      if (TREE_CODE (val) == ADDR_EXPR
                          && is_gimple_min_invariant (val))
                        {
                          tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
                                                  unshare_expr (val),
                                                  TREE_OPERAND (rhs, 1));
                          if (tem)
                            rhs = tem;
                        }
                    }
                  return fold_const_aggregate_ref_1 (rhs, valueize);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            {
              /* Handle unary operators that can appear in GIMPLE form.
                 Note that we know the single operand must be a constant,
                 so this should almost always return a simplified RHS.  */
              tree lhs = gimple_assign_lhs (stmt);
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));

              /* Conversions are useless for CCP purposes if they are
                 value-preserving.  Thus the restrictions that
                 useless_type_conversion_p places for restrict qualification
                 of pointer types should not apply here.
                 Substitution later will only substitute to allowed places.  */
              if (CONVERT_EXPR_CODE_P (subcode)
                  && POINTER_TYPE_P (TREE_TYPE (lhs))
                  && POINTER_TYPE_P (TREE_TYPE (op0))
                  && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
                     == TYPE_ADDR_SPACE (TREE_TYPE (op0))
                  && TYPE_MODE (TREE_TYPE (lhs))
                     == TYPE_MODE (TREE_TYPE (op0)))
                return op0;

              return
                fold_unary_ignore_overflow_loc (loc, subcode,
                                                gimple_expr_type (stmt), op0);
            }

          case GIMPLE_BINARY_RHS:
            {
              /* Handle binary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

              /* Translate &x + CST into an invariant form suitable for
                 further propagation.  */
              if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                  && TREE_CODE (op0) == ADDR_EXPR
                  && TREE_CODE (op1) == INTEGER_CST)
                {
                  tree off = fold_convert (ptr_type_node, op1);
                  return build_fold_addr_expr_loc
                           (loc,
                            fold_build2 (MEM_REF,
                                         TREE_TYPE (TREE_TYPE (op0)),
                                         unshare_expr (op0), off));
                }

              return fold_binary_loc (loc, subcode,
                                      gimple_expr_type (stmt), op0, op1);
            }

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

              /* Fold embedded expressions in ternary codes.  */
              if ((subcode == COND_EXPR
                   || subcode == VEC_COND_EXPR)
                  && COMPARISON_CLASS_P (op0))
                {
                  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
                  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
                  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
                                              TREE_TYPE (op0), op00, op01);
                  if (tem)
                    op0 = tem;
                }

              return fold_ternary_loc (loc, subcode,
                                       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
        tree fn;

        if (gimple_call_internal_p (stmt))
          {
            enum tree_code subcode = ERROR_MARK;
            switch (gimple_call_internal_fn (stmt))
              {
              case IFN_UBSAN_CHECK_ADD:
                subcode = PLUS_EXPR;
                break;
              case IFN_UBSAN_CHECK_SUB:
                subcode = MINUS_EXPR;
                break;
              case IFN_UBSAN_CHECK_MUL:
                subcode = MULT_EXPR;
                break;
              default:
                return NULL_TREE;
              }
            tree op0 = (*valueize) (gimple_call_arg (stmt, 0));
            tree op1 = (*valueize) (gimple_call_arg (stmt, 1));

            if (TREE_CODE (op0) != INTEGER_CST
                || TREE_CODE (op1) != INTEGER_CST)
              return NULL_TREE;
            tree res = fold_binary_loc (loc, subcode,
                                        TREE_TYPE (gimple_call_arg (stmt, 0)),
                                        op0, op1);
            if (res
                && TREE_CODE (res) == INTEGER_CST
                && !TREE_OVERFLOW (res))
              return res;
            return NULL_TREE;
          }

        fn = (*valueize) (gimple_call_fn (stmt));
        if (TREE_CODE (fn) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
            && gimple_builtin_call_types_compatible_p (stmt,
                                                       TREE_OPERAND (fn, 0)))
          {
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree call, retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              args[i] = (*valueize) (gimple_call_arg (stmt, i));
            call = build_call_array_loc (loc,
                                         gimple_call_return_type (stmt),
                                         fn, gimple_call_num_args (stmt), args);
            retval = fold_call_expr (EXPR_LOCATION (call), call, false);
            if (retval)
              {
                /* fold_call_expr wraps the result inside a NOP_EXPR.  */
                STRIP_NOPS (retval);
                retval = fold_convert (gimple_call_return_type (stmt), retval);
              }
            return retval;
          }
        return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
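
/* An illustrative example (editorial sketch): if VALUEIZE maps x_1 to
   the constant 3, the statement

     y_2 = x_1 * 4;

   folds to 12 and is returned here, because 12 satisfies
   is_gimple_min_invariant; a fold that merely produces another
   non-constant expression is discarded by this wrapper.  */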
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

static tree fold_ctor_reference (tree type, tree ctor,
                                 unsigned HOST_WIDE_INT offset,
                                 unsigned HOST_WIDE_INT size, tree);
/* See if we can find the constructor defining the value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know the constructor of the array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when the constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
                      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
        {
          if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
            return NULL_TREE;
          *bit_offset += (mem_ref_offset (base).low
                          * BITS_PER_UNIT);
        }

      if (valueize
          && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
        base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
        tree init = ctor_for_folding (base);

        /* Our semantic is exact opposite of ctor_for_folding;
           NULL means unknown, while error_mark_node is 0.  */
        if (init == error_mark_node)
          return NULL_TREE;
        if (!init)
          return error_mark_node;
        return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
        return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
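
/* An illustrative example (editorial sketch): for

     static const int a[2] = { 1, 2 };

   a base of the form MEM_REF (&a, 4) yields a's CONSTRUCTOR and adds 32
   to BIT_OFFSET (4 bytes, assuming 8-bit units), while for
   'static const int b;' error_mark_node is returned to signal a value
   known to be zero despite the missing initializer.  */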
/* CTOR is a STRING_CST.  Fold a reference of type TYPE and size SIZE
   to the memory at bit OFFSET.

   We only do the simple job of folding byte accesses.  */

static tree
fold_string_cst_ctor_reference (tree type, tree ctor,
                                unsigned HOST_WIDE_INT offset,
                                unsigned HOST_WIDE_INT size)
{
  if (INTEGRAL_TYPE_P (type)
      && (TYPE_MODE (type)
          == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
          == MODE_INT)
      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
      && size == BITS_PER_UNIT
      && !(offset % BITS_PER_UNIT))
    {
      offset /= BITS_PER_UNIT;
      if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
        return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
                                          [offset]));
      /* Folding
         const char a[20]="hello";
         return a[10];

         might lead to an offset greater than the string length.  In this
         case we know the value is either initialized to 0 or out of bounds.
         Return 0 in both cases.  */
      return build_zero_cst (type);
    }
  return NULL_TREE;
}
/* CTOR is a CONSTRUCTOR of an array type.  Fold a reference of type TYPE and
   size SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
                           unsigned HOST_WIDE_INT offset,
                           unsigned HOST_WIDE_INT size,
                           tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  double_int low_bound, elt_size;
  double_int index, max_index;
  double_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = double_int_zero;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
              == INTEGER_CST);
  elt_size =
    tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than the size of an array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type)))
      || elt_size.is_zero ())
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
                 .udiv (elt_size, TRUNC_DIV_EXPR);
  access_index += low_bound;
  if (index_type)
    access_index = access_index.ext (TYPE_PRECISION (index_type),
                                     TYPE_UNSIGNED (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span the whole access.  We do
     not care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  index = low_bound - double_int_one;
  if (index_type)
    index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* An array constructor might explicitly set the index, or specify a
         range, or leave the index NULL meaning that it is the next index
         after the previous one.  */
      if (cfield)
        {
          if (TREE_CODE (cfield) == INTEGER_CST)
            max_index = index = tree_to_double_int (cfield);
          else
            {
              gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
              index = tree_to_double_int (TREE_OPERAND (cfield, 0));
              max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
            }
        }
      else
        {
          index += double_int_one;
          if (index_type)
            index = index.ext (TYPE_PRECISION (index_type),
                               TYPE_UNSIGNED (index_type));
          max_index = index;
        }

      /* Do we have a match?  */
      if (access_index.cmp (index, 1) >= 0
          && access_index.cmp (max_index, 1) <= 0)
        return fold_ctor_reference (type, cval, inner_offset, size,
                                    from_decl);
    }
  /* When memory is not explicitly mentioned in the constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
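
/* An illustrative example (editorial sketch, assuming 4-byte int and
   8-bit units): a 32-bit read at bit offset 64 from

     static const int a[4] = { 10, 20, 30, 40 };

   computes access_index = (64 / 8) / 4 = 2 and folds to 30; a read of
   an element the initializer does not mention folds to zero.  */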
/* CTOR is a CONSTRUCTOR of an aggregate or vector.
   Fold a reference of type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
                              unsigned HOST_WIDE_INT offset,
                              unsigned HOST_WIDE_INT size,
                              tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
                            cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      double_int bitoffset;
      double_int byte_offset_cst = tree_to_double_int (byte_offset);
      double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
      double_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
         but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
                  && TREE_CODE (byte_offset) == INTEGER_CST
                  && (field_size != NULL_TREE
                      ? TREE_CODE (field_size) == INTEGER_CST
                      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = tree_to_double_int (field_offset)
                  + byte_offset_cst * bits_per_unit_cst;
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
        bitoffset_end = bitoffset + tree_to_double_int (field_size);
      else
        bitoffset_end = double_int_zero;

      access_end = double_int::from_uhwi (offset)
                   + double_int::from_uhwi (size);

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
         [BITOFFSET, BITOFFSET_END)?  */
      if (access_end.cmp (bitoffset, 0) > 0
          && (field_size == NULL_TREE
              || double_int::from_uhwi (offset).slt (bitoffset_end)))
        {
          double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
          /* We do have overlap.  Now see if the field is large enough to
             cover the access.  Give up for accesses spanning multiple
             fields.  */
          if (access_end.cmp (bitoffset_end, 0) > 0)
            return NULL_TREE;
          if (double_int::from_uhwi (offset).slt (bitoffset))
            return NULL_TREE;
          return fold_ctor_reference (type, cval,
                                      inner_offset.to_uhwi (), size,
                                      from_decl);
        }
    }
  /* When memory is not explicitly mentioned in the constructor, it is 0.  */
  return build_zero_cst (type);
}
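
/* An illustrative example (editorial sketch): given

     struct S { char c; int i; };
     static const struct S s = { 'a', 42 };

   a 32-bit access at the bit offset of field 'i' overlaps exactly that
   field and recurses into fold_ctor_reference on the value 42; an
   access that only partly overlaps a field returns NULL_TREE, and one
   falling entirely into the padding between 'c' and 'i' folds to
   zero.  */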
/* CTOR is a value initializing memory.  Fold a reference of type TYPE and
   size SIZE to the memory at bit OFFSET.  */

static tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
                     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && operand_equal_p (TYPE_SIZE (type),
                          TYPE_SIZE (TREE_TYPE (ctor)), 0))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
        STRIP_NOPS (ret);
      return ret;
    }
  if (TREE_CODE (ctor) == STRING_CST)
    return fold_string_cst_ctor_reference (type, ctor, offset, size);
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
          || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
        return fold_array_ctor_reference (type, ctor, offset, size,
                                          from_decl);
      else
        return fold_nonarray_ctor_reference (type, ctor, offset, size,
                                             from_decl);
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
         Only special case variable offsets.
         FIXME: This code can't handle nested references with variable indexes
         (they will be handled only by iteration of ccp).  Perhaps we can bring
         get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
          && valueize
          && (idx = (*valueize) (TREE_OPERAND (t, 1)))
          && TREE_CODE (idx) == INTEGER_CST)
        {
          tree low_bound, unit_size;
          double_int doffset;

          /* If the resulting bit-offset is constant, track it.  */
          if ((low_bound = array_ref_low_bound (t),
               TREE_CODE (low_bound) == INTEGER_CST)
              && (unit_size = array_ref_element_size (t),
                  tree_fits_uhwi_p (unit_size))
              && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
                            .sext (TYPE_PRECISION (TREE_TYPE (idx))),
                  doffset.fits_shwi ()))
            {
              offset = doffset.to_shwi ();
              offset *= tree_to_uhwi (unit_size);
              offset *= BITS_PER_UNIT;

              base = TREE_OPERAND (t, 0);
              ctor = get_base_constructor (base, &offset, valueize);
              /* Empty constructor.  Always fold to 0.  */
              if (ctor == error_mark_node)
                return build_zero_cst (TREE_TYPE (t));
              /* Out of bound array access.  Value is undefined,
                 but don't fold.  */
              if (offset < 0)
                return NULL_TREE;
              /* We cannot determine the ctor.  */
              if (!ctor)
                return NULL_TREE;
              return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
                                          tree_to_uhwi (unit_size)
                                          * BITS_PER_UNIT,
                                          base);
            }
        }
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
        return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
        return NULL_TREE;
      /* We cannot determine the ctor.  */
      if (!ctor)
        return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
        return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
                                  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1_loc (EXPR_LOCATION (t),
                                  TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_const_aggregate_ref_1, but without valueization of SSA names.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
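
/* An illustrative example (editorial sketch): with

     static const int a[3] = { 7, 8, 9 };

   fold_const_aggregate_ref on the tree a[2] resolves a's CONSTRUCTOR
   through get_base_constructor and returns 9; with a variable index i_1
   the _1 variant can still fold once the valueize callback resolves i_1
   to a constant.  */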
/* Lookup the virtual method with index TOKEN in virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++-produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
                                   tree v,
                                   unsigned HOST_WIDE_INT offset,
                                   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double-check that we have a virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that the ctor seems unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }

  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be an array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
                               offset, size, NULL);
     but in constant time.  We expect that the frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes the assumption that there are no
     indexed fields produced by the C++ FE, so we can directly index
     the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For a type-inconsistent program we may end up looking up a virtual
     method in a virtual table that does not contain TOKEN entries.  We may
     overrun the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When the cgraph node is missing and the function is not public, we
         cannot devirtualize.  This can happen in WHOPR when the actual method
         ends up in another partition, because we found the devirtualization
         possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
        {
          if (can_refer)
            {
              *can_refer = false;
              return fn;
            }
          return NULL_TREE;
        }
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_get_create_node (fn);

  return fn;
}
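
/* An illustrative example (editorial sketch, assuming 8-byte pointer
   slots): OFFSET 16 and TOKEN 2 select constructor element
   16/8 + 2 = 4 of the vtable array.  If that element is not the address
   of a FUNCTION_DECL, say because a type-inconsistent program made the
   lookup overrun into an RTTI pointer, the call is folded to
   __builtin_unreachable instead.  */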
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++-produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
                                  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
         cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
        {
        case ABS_EXPR:
          /* Always true for floating-point operands.  */
          return true;

        case MULT_EXPR:
          /* True if the two operands are identical (since we are
             restricted to floating-point inputs).  */
          op0 = gimple_assign_rhs1 (def_stmt);
          op1 = gimple_assign_rhs2 (def_stmt);

          if (op0 == op1
              || operand_equal_p (op0, op1, 0))
            return true;
          break;

        default:
          break;
        }
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
          && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
        {
          tree arg1;

          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_ACOS):
            CASE_FLT_FN (BUILT_IN_ACOSH):
            CASE_FLT_FN (BUILT_IN_CABS):
            CASE_FLT_FN (BUILT_IN_COSH):
            CASE_FLT_FN (BUILT_IN_ERFC):
            CASE_FLT_FN (BUILT_IN_EXP):
            CASE_FLT_FN (BUILT_IN_EXP10):
            CASE_FLT_FN (BUILT_IN_EXP2):
            CASE_FLT_FN (BUILT_IN_FABS):
            CASE_FLT_FN (BUILT_IN_FDIM):
            CASE_FLT_FN (BUILT_IN_HYPOT):
            CASE_FLT_FN (BUILT_IN_POW10):
              return true;

            CASE_FLT_FN (BUILT_IN_SQRT):
              /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
                 nonnegative inputs.  */
              if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
                return true;

              break;

            CASE_FLT_FN (BUILT_IN_POWI):
              /* True if the second argument is an even integer.  */
              arg1 = gimple_call_arg (def_stmt, 1);

              if (TREE_CODE (arg1) == INTEGER_CST
                  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
                return true;

              break;

            CASE_FLT_FN (BUILT_IN_POW):
              /* True if the second argument is an even integer-valued
                 real.  */
              arg1 = gimple_call_arg (def_stmt, 1);

              if (TREE_CODE (arg1) == REAL_CST)
                {
                  REAL_VALUE_TYPE c;
                  HOST_WIDE_INT n;

                  c = TREE_REAL_CST (arg1);
                  n = real_to_integer (&c);

                  if ((n & 1) == 0)
                    {
                      REAL_VALUE_TYPE cint;
                      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
                      if (real_identical (&c, &cint))
                        return true;
                    }
                }

              break;

            default:
              return false;
            }
        }
    }

  return false;
}
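
/* Illustrative cases (editorial sketch, hypothetical SSA names):
   y_2 = x_1 * x_1 is recognized as nonnegative, as is
   __builtin_pow (x_1, 4.0), because 4.0 is an even integer-valued real;
   pow (x_1, 3.0) is not, since an odd power preserves the sign of
   x_1.  */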
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
          && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (TREE_CODE (min_val) == INTEGER_CST)
            return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
          && tree_fits_uhwi_p (off))
        {
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);
          if (offset / part_widthi
              < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
                                part_width, index);
        }

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
          || DECL_P (TREE_OPERAND (addr, 0)))
        return fold_build2 (MEM_REF, type,
                            addr,
                            build_int_cst_wide (ptype,
                                                TREE_INT_CST_LOW (off),
                                                TREE_INT_CST_HIGH (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
        sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
        return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
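
/* Illustrative folds (editorial sketch, hypothetical SSA names):
   *(p_1 + 4) for a plain pointer SSA name p_1 becomes MEM_REF <p_1, 4>,
   and *(float *)&v for a vector v becomes BIT_FIELD_REF <v, 32, 0>
   (assuming a 32-bit float), matching the source-level patterns shown
   in the comments above.  */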
/* Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
               "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each operand of STMT to the unsigned type.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
                     force_gimple_operand (fold_convert (type,
                                                         gimple_op (stmt, i)),
                                           &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  /* Carry out the operation in the unsigned type.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* And convert the result back to the original type.  */
  gimple cvt = gimple_build_assign_with_ops
      (NOP_EXPR, lhs, gimple_assign_lhs (stmt), NULL_TREE);
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
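
/* An illustrative example (editorial sketch, hypothetical SSA names):
   a signed addition

     z_3 = x_1 + y_2;

   is rewritten into the sequence

     _4 = (unsigned int) x_1;
     _5 = (unsigned int) y_2;
     _6 = _4 + _5;
     z_3 = (int) _6;

   where the unsigned addition wraps instead of invoking undefined
   behavior on overflow.  */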