/* Scrape header from the gitweb view of official-gcc.git:
   integration/gcc/gimple-fold.c (aosp-toolchain/gcc/gcc-4_9 merge),
   blob dd2aed73e77c902f9ddbec0df8938c36e65fc066.  */
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2013 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "dumpfile.h"
29 #include "tree-ssa.h"
30 #include "tree-ssa-propagate.h"
31 #include "target.h"
32 #include "gimple-fold.h"
33 #include "ipa-utils.h"
34 #include "gimple-pretty-print.h"
36 /* Return true when DECL can be referenced from current unit.
37 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
38 We can get declarations that are not possible to reference for various
39 reasons:
41 1) When analyzing C++ virtual tables.
42 C++ virtual tables do have known constructors even
43 when they are keyed to other compilation unit.
44 Those tables can contain pointers to methods and vars
45 in other units. Those methods have both STATIC and EXTERNAL
46 set.
47 2) In WHOPR mode devirtualization might lead to reference
48 to method that was partitioned elsehwere.
49 In this case we have static VAR_DECL or FUNCTION_DECL
50 that has no corresponding callgraph/varpool node
51 declaring the body.
52 3) COMDAT functions referred by external vtables that
53 we devirtualize only during final copmilation stage.
54 At this time we already decided that we will not output
55 the function body and thus we can't reference the symbol
56 directly. */
58 static bool
59 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
61 struct varpool_node *vnode;
62 struct cgraph_node *node;
63 symtab_node snode;
65 if (DECL_ABSTRACT (decl))
66 return false;
68 /* We are concerned only about static/external vars and functions. */
69 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
70 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
71 return true;
73 /* Static objects can be referred only if they was not optimized out yet. */
74 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
76 snode = symtab_get_node (decl);
77 if (!snode)
78 return false;
79 node = dyn_cast <cgraph_node> (snode);
80 return !node || !node->global.inlined_to;
83 /* We will later output the initializer, so we can refer to it.
84 So we are concerned only when DECL comes from initializer of
85 external var. */
86 if (!from_decl
87 || TREE_CODE (from_decl) != VAR_DECL
88 || !DECL_EXTERNAL (from_decl)
89 || (flag_ltrans
90 && symtab_get_node (from_decl)->symbol.in_other_partition))
91 return true;
92 /* We are folding reference from external vtable. The vtable may reffer
93 to a symbol keyed to other compilation unit. The other compilation
94 unit may be in separate DSO and the symbol may be hidden. */
95 if (DECL_VISIBILITY_SPECIFIED (decl)
96 && DECL_EXTERNAL (decl)
97 && (!(snode = symtab_get_node (decl)) || !snode->symbol.in_other_partition))
98 return false;
99 /* When function is public, we always can introduce new reference.
100 Exception are the COMDAT functions where introducing a direct
101 reference imply need to include function body in the curren tunit. */
102 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
103 return true;
104 /* We are not at ltrans stage; so don't worry about WHOPR.
105 Also when still gimplifying all referred comdat functions will be
106 produced.
108 As observed in PR20991 for already optimized out comdat virtual functions
109 it may be tempting to not necessarily give up because the copy will be
110 output elsewhere when corresponding vtable is output.
111 This is however not possible - ABI specify that COMDATs are output in
112 units where they are used and when the other unit was compiled with LTO
113 it is possible that vtable was kept public while the function itself
114 was privatized. */
115 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
116 return true;
118 /* OK we are seeing either COMDAT or static variable. In this case we must
119 check that the definition is still around so we can refer it. */
120 if (TREE_CODE (decl) == FUNCTION_DECL)
122 node = cgraph_get_node (decl);
123 /* Check that we still have function body and that we didn't took
124 the decision to eliminate offline copy of the function yet.
125 The second is important when devirtualization happens during final
126 compilation stage when making a new reference no longer makes callee
127 to be compiled. */
128 if (!node || !node->symbol.definition || node->global.inlined_to)
130 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
131 return false;
134 else if (TREE_CODE (decl) == VAR_DECL)
136 vnode = varpool_get_node (decl);
137 if (!vnode || !vnode->symbol.definition)
139 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
140 return false;
143 return true;
146 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
147 acceptable form for is_gimple_min_invariant.
148 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
150 tree
151 canonicalize_constructor_val (tree cval, tree from_decl)
153 tree orig_cval = cval;
154 STRIP_NOPS (cval);
155 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
156 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
158 tree ptr = TREE_OPERAND (cval, 0);
159 if (is_gimple_min_invariant (ptr))
160 cval = build1_loc (EXPR_LOCATION (cval),
161 ADDR_EXPR, TREE_TYPE (ptr),
162 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
163 ptr,
164 fold_convert (ptr_type_node,
165 TREE_OPERAND (cval, 1))));
167 if (TREE_CODE (cval) == ADDR_EXPR)
169 tree base = NULL_TREE;
170 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
172 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
173 if (base)
174 TREE_OPERAND (cval, 0) = base;
176 else
177 base = get_base_address (TREE_OPERAND (cval, 0));
178 if (!base)
179 return NULL_TREE;
181 if ((TREE_CODE (base) == VAR_DECL
182 || TREE_CODE (base) == FUNCTION_DECL)
183 && !can_refer_decl_in_current_unit_p (base, from_decl))
184 return NULL_TREE;
185 if (TREE_CODE (base) == VAR_DECL)
186 TREE_ADDRESSABLE (base) = 1;
187 else if (TREE_CODE (base) == FUNCTION_DECL)
189 /* Make sure we create a cgraph node for functions we'll reference.
190 They can be non-existent if the reference comes from an entry
191 of an external vtable for example. */
192 cgraph_get_create_real_symbol_node (base);
194 /* Fixup types in global initializers. */
195 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
196 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
198 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
199 cval = fold_convert (TREE_TYPE (orig_cval), cval);
200 return cval;
202 return orig_cval;
205 /* If SYM is a constant variable with known value, return the value.
206 NULL_TREE is returned otherwise. */
208 tree
209 get_symbol_constant_value (tree sym)
211 tree val = ctor_for_folding (sym);
212 if (val != error_mark_node)
214 if (val)
216 val = canonicalize_constructor_val (unshare_expr (val), sym);
217 if (val && is_gimple_min_invariant (val))
218 return val;
219 else
220 return NULL_TREE;
222 /* Variables declared 'const' without an initializer
223 have zero as the initializer if they may not be
224 overridden at link or run time. */
225 if (!val
226 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
227 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
228 return build_zero_cst (TREE_TYPE (sym));
231 return NULL_TREE;
236 /* Subroutine of fold_stmt. We perform several simplifications of the
237 memory reference tree EXPR and make sure to re-gimplify them properly
238 after propagation of constant addresses. IS_LHS is true if the
239 reference is supposed to be an lvalue. */
241 static tree
242 maybe_fold_reference (tree expr, bool is_lhs)
244 tree *t = &expr;
245 tree result;
247 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
248 || TREE_CODE (expr) == REALPART_EXPR
249 || TREE_CODE (expr) == IMAGPART_EXPR)
250 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
251 return fold_unary_loc (EXPR_LOCATION (expr),
252 TREE_CODE (expr),
253 TREE_TYPE (expr),
254 TREE_OPERAND (expr, 0));
255 else if (TREE_CODE (expr) == BIT_FIELD_REF
256 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
257 return fold_ternary_loc (EXPR_LOCATION (expr),
258 TREE_CODE (expr),
259 TREE_TYPE (expr),
260 TREE_OPERAND (expr, 0),
261 TREE_OPERAND (expr, 1),
262 TREE_OPERAND (expr, 2));
264 while (handled_component_p (*t))
265 t = &TREE_OPERAND (*t, 0);
267 /* Canonicalize MEM_REFs invariant address operand. Do this first
268 to avoid feeding non-canonical MEM_REFs elsewhere. */
269 if (TREE_CODE (*t) == MEM_REF
270 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
272 bool volatile_p = TREE_THIS_VOLATILE (*t);
273 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
274 TREE_OPERAND (*t, 0),
275 TREE_OPERAND (*t, 1));
276 if (tem)
278 TREE_THIS_VOLATILE (tem) = volatile_p;
279 *t = tem;
280 tem = maybe_fold_reference (expr, is_lhs);
281 if (tem)
282 return tem;
283 return expr;
287 if (!is_lhs
288 && (result = fold_const_aggregate_ref (expr))
289 && is_gimple_min_invariant (result))
290 return result;
292 /* Fold back MEM_REFs to reference trees. */
293 if (TREE_CODE (*t) == MEM_REF
294 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
295 && integer_zerop (TREE_OPERAND (*t, 1))
296 && (TREE_THIS_VOLATILE (*t)
297 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
298 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
299 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
300 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
301 /* We have to look out here to not drop a required conversion
302 from the rhs to the lhs if is_lhs, but we don't have the
303 rhs here to verify that. Thus require strict type
304 compatibility. */
305 && types_compatible_p (TREE_TYPE (*t),
306 TREE_TYPE (TREE_OPERAND
307 (TREE_OPERAND (*t, 0), 0))))
309 tree tem;
310 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
311 tem = maybe_fold_reference (expr, is_lhs);
312 if (tem)
313 return tem;
314 return expr;
316 else if (TREE_CODE (*t) == TARGET_MEM_REF)
318 tree tem = maybe_fold_tmr (*t);
319 if (tem)
321 *t = tem;
322 tem = maybe_fold_reference (expr, is_lhs);
323 if (tem)
324 return tem;
325 return expr;
329 return NULL_TREE;
333 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
334 replacement rhs for the statement or NULL_TREE if no simplification
335 could be made. It is assumed that the operands have been previously
336 folded. */
338 static tree
339 fold_gimple_assign (gimple_stmt_iterator *si)
341 gimple stmt = gsi_stmt (*si);
342 enum tree_code subcode = gimple_assign_rhs_code (stmt);
343 location_t loc = gimple_location (stmt);
345 tree result = NULL_TREE;
347 switch (get_gimple_rhs_class (subcode))
349 case GIMPLE_SINGLE_RHS:
351 tree rhs = gimple_assign_rhs1 (stmt);
353 if (REFERENCE_CLASS_P (rhs))
354 return maybe_fold_reference (rhs, false);
356 else if (TREE_CODE (rhs) == ADDR_EXPR)
358 tree ref = TREE_OPERAND (rhs, 0);
359 tree tem = maybe_fold_reference (ref, true);
360 if (tem
361 && TREE_CODE (tem) == MEM_REF
362 && integer_zerop (TREE_OPERAND (tem, 1)))
363 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
364 else if (tem)
365 result = fold_convert (TREE_TYPE (rhs),
366 build_fold_addr_expr_loc (loc, tem));
367 else if (TREE_CODE (ref) == MEM_REF
368 && integer_zerop (TREE_OPERAND (ref, 1)))
369 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
372 else if (TREE_CODE (rhs) == CONSTRUCTOR
373 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
374 && (CONSTRUCTOR_NELTS (rhs)
375 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
377 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
378 unsigned i;
379 tree val;
381 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
382 if (TREE_CODE (val) != INTEGER_CST
383 && TREE_CODE (val) != REAL_CST
384 && TREE_CODE (val) != FIXED_CST)
385 return NULL_TREE;
387 return build_vector_from_ctor (TREE_TYPE (rhs),
388 CONSTRUCTOR_ELTS (rhs));
391 else if (DECL_P (rhs))
392 return get_symbol_constant_value (rhs);
394 /* If we couldn't fold the RHS, hand over to the generic
395 fold routines. */
396 if (result == NULL_TREE)
397 result = fold (rhs);
399 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
400 that may have been added by fold, and "useless" type
401 conversions that might now be apparent due to propagation. */
402 STRIP_USELESS_TYPE_CONVERSION (result);
404 if (result != rhs && valid_gimple_rhs_p (result))
405 return result;
407 return NULL_TREE;
409 break;
411 case GIMPLE_UNARY_RHS:
413 tree rhs = gimple_assign_rhs1 (stmt);
415 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
416 if (result)
418 /* If the operation was a conversion do _not_ mark a
419 resulting constant with TREE_OVERFLOW if the original
420 constant was not. These conversions have implementation
421 defined behavior and retaining the TREE_OVERFLOW flag
422 here would confuse later passes such as VRP. */
423 if (CONVERT_EXPR_CODE_P (subcode)
424 && TREE_CODE (result) == INTEGER_CST
425 && TREE_CODE (rhs) == INTEGER_CST)
426 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
428 STRIP_USELESS_TYPE_CONVERSION (result);
429 if (valid_gimple_rhs_p (result))
430 return result;
433 break;
435 case GIMPLE_BINARY_RHS:
436 /* Try to canonicalize for boolean-typed X the comparisons
437 X == 0, X == 1, X != 0, and X != 1. */
438 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
439 || gimple_assign_rhs_code (stmt) == NE_EXPR)
441 tree lhs = gimple_assign_lhs (stmt);
442 tree op1 = gimple_assign_rhs1 (stmt);
443 tree op2 = gimple_assign_rhs2 (stmt);
444 tree type = TREE_TYPE (op1);
446 /* Check whether the comparison operands are of the same boolean
447 type as the result type is.
448 Check that second operand is an integer-constant with value
449 one or zero. */
450 if (TREE_CODE (op2) == INTEGER_CST
451 && (integer_zerop (op2) || integer_onep (op2))
452 && useless_type_conversion_p (TREE_TYPE (lhs), type))
454 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
455 bool is_logical_not = false;
457 /* X == 0 and X != 1 is a logical-not.of X
458 X == 1 and X != 0 is X */
459 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
460 || (cmp_code == NE_EXPR && integer_onep (op2)))
461 is_logical_not = true;
463 if (is_logical_not == false)
464 result = op1;
465 /* Only for one-bit precision typed X the transformation
466 !X -> ~X is valied. */
467 else if (TYPE_PRECISION (type) == 1)
468 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
469 type, op1);
470 /* Otherwise we use !X -> X ^ 1. */
471 else
472 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
473 type, op1, build_int_cst (type, 1));
478 if (!result)
479 result = fold_binary_loc (loc, subcode,
480 TREE_TYPE (gimple_assign_lhs (stmt)),
481 gimple_assign_rhs1 (stmt),
482 gimple_assign_rhs2 (stmt));
484 if (result)
486 STRIP_USELESS_TYPE_CONVERSION (result);
487 if (valid_gimple_rhs_p (result))
488 return result;
490 break;
492 case GIMPLE_TERNARY_RHS:
493 /* Try to fold a conditional expression. */
494 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
496 tree op0 = gimple_assign_rhs1 (stmt);
497 tree tem;
498 bool set = false;
499 location_t cond_loc = gimple_location (stmt);
501 if (COMPARISON_CLASS_P (op0))
503 fold_defer_overflow_warnings ();
504 tem = fold_binary_loc (cond_loc,
505 TREE_CODE (op0), TREE_TYPE (op0),
506 TREE_OPERAND (op0, 0),
507 TREE_OPERAND (op0, 1));
508 /* This is actually a conditional expression, not a GIMPLE
509 conditional statement, however, the valid_gimple_rhs_p
510 test still applies. */
511 set = (tem && is_gimple_condexpr (tem)
512 && valid_gimple_rhs_p (tem));
513 fold_undefer_overflow_warnings (set, stmt, 0);
515 else if (is_gimple_min_invariant (op0))
517 tem = op0;
518 set = true;
520 else
521 return NULL_TREE;
523 if (set)
524 result = fold_build3_loc (cond_loc, COND_EXPR,
525 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
526 gimple_assign_rhs2 (stmt),
527 gimple_assign_rhs3 (stmt));
530 if (!result)
531 result = fold_ternary_loc (loc, subcode,
532 TREE_TYPE (gimple_assign_lhs (stmt)),
533 gimple_assign_rhs1 (stmt),
534 gimple_assign_rhs2 (stmt),
535 gimple_assign_rhs3 (stmt));
537 if (result)
539 STRIP_USELESS_TYPE_CONVERSION (result);
540 if (valid_gimple_rhs_p (result))
541 return result;
543 break;
545 case GIMPLE_INVALID_RHS:
546 gcc_unreachable ();
549 return NULL_TREE;
552 /* Attempt to fold a conditional statement. Return true if any changes were
553 made. We only attempt to fold the condition expression, and do not perform
554 any transformation that would require alteration of the cfg. It is
555 assumed that the operands have been previously folded. */
557 static bool
558 fold_gimple_cond (gimple stmt)
560 tree result = fold_binary_loc (gimple_location (stmt),
561 gimple_cond_code (stmt),
562 boolean_type_node,
563 gimple_cond_lhs (stmt),
564 gimple_cond_rhs (stmt));
566 if (result)
568 STRIP_USELESS_TYPE_CONVERSION (result);
569 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
571 gimple_cond_set_condition_from_tree (stmt, result);
572 return true;
576 return false;
579 /* Convert EXPR into a GIMPLE value suitable for substitution on the
580 RHS of an assignment. Insert the necessary statements before
581 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
582 is replaced. If the call is expected to produces a result, then it
583 is replaced by an assignment of the new RHS to the result variable.
584 If the result is to be ignored, then the call is replaced by a
585 GIMPLE_NOP. A proper VDEF chain is retained by making the first
586 VUSE and the last VDEF of the whole sequence be the same as the replaced
587 statement and using new SSA names for stores in between. */
589 void
590 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
592 tree lhs;
593 gimple stmt, new_stmt;
594 gimple_stmt_iterator i;
595 gimple_seq stmts = NULL;
596 struct gimplify_ctx gctx;
597 gimple laststore;
598 tree reaching_vuse;
600 stmt = gsi_stmt (*si_p);
602 gcc_assert (is_gimple_call (stmt));
604 push_gimplify_context (&gctx);
605 gctx.into_ssa = gimple_in_ssa_p (cfun);
607 lhs = gimple_call_lhs (stmt);
608 if (lhs == NULL_TREE)
610 gimplify_and_add (expr, &stmts);
611 /* We can end up with folding a memcpy of an empty class assignment
612 which gets optimized away by C++ gimplification. */
613 if (gimple_seq_empty_p (stmts))
615 pop_gimplify_context (NULL);
616 if (gimple_in_ssa_p (cfun))
618 unlink_stmt_vdef (stmt);
619 release_defs (stmt);
621 gsi_replace (si_p, gimple_build_nop (), true);
622 return;
625 else
627 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
628 new_stmt = gimple_build_assign (lhs, tmp);
629 i = gsi_last (stmts);
630 gsi_insert_after_without_update (&i, new_stmt,
631 GSI_CONTINUE_LINKING);
634 pop_gimplify_context (NULL);
636 if (gimple_has_location (stmt))
637 annotate_all_with_location (stmts, gimple_location (stmt));
639 /* First iterate over the replacement statements backward, assigning
640 virtual operands to their defining statements. */
641 laststore = NULL;
642 for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
644 new_stmt = gsi_stmt (i);
645 if ((gimple_assign_single_p (new_stmt)
646 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
647 || (is_gimple_call (new_stmt)
648 && (gimple_call_flags (new_stmt)
649 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
651 tree vdef;
652 if (!laststore)
653 vdef = gimple_vdef (stmt);
654 else
655 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
656 gimple_set_vdef (new_stmt, vdef);
657 if (vdef && TREE_CODE (vdef) == SSA_NAME)
658 SSA_NAME_DEF_STMT (vdef) = new_stmt;
659 laststore = new_stmt;
663 /* Second iterate over the statements forward, assigning virtual
664 operands to their uses. */
665 reaching_vuse = gimple_vuse (stmt);
666 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
668 new_stmt = gsi_stmt (i);
669 /* If the new statement possibly has a VUSE, update it with exact SSA
670 name we know will reach this one. */
671 if (gimple_has_mem_ops (new_stmt))
672 gimple_set_vuse (new_stmt, reaching_vuse);
673 gimple_set_modified (new_stmt, true);
674 if (gimple_vdef (new_stmt))
675 reaching_vuse = gimple_vdef (new_stmt);
678 /* If the new sequence does not do a store release the virtual
679 definition of the original statement. */
680 if (reaching_vuse
681 && reaching_vuse == gimple_vuse (stmt))
683 tree vdef = gimple_vdef (stmt);
684 if (vdef
685 && TREE_CODE (vdef) == SSA_NAME)
687 unlink_stmt_vdef (stmt);
688 release_ssa_name (vdef);
692 /* Finally replace the original statement with the sequence. */
693 gsi_replace_with_seq (si_p, stmts, false);
696 /* Return the string length, maximum string length or maximum value of
697 ARG in LENGTH.
698 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
699 is not NULL and, for TYPE == 0, its value is not equal to the length
700 we determine or if we are unable to determine the length or value,
701 return false. VISITED is a bitmap of visited variables.
702 TYPE is 0 if string length should be returned, 1 for maximum string
703 length and 2 for maximum value ARG can have. */
705 static bool
706 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
708 tree var, val;
709 gimple def_stmt;
711 if (TREE_CODE (arg) != SSA_NAME)
713 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
714 if (TREE_CODE (arg) == ADDR_EXPR
715 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
716 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
718 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
719 if (TREE_CODE (aop0) == INDIRECT_REF
720 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
721 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
722 length, visited, type);
725 if (type == 2)
727 val = arg;
728 if (TREE_CODE (val) != INTEGER_CST
729 || tree_int_cst_sgn (val) < 0)
730 return false;
732 else
733 val = c_strlen (arg, 1);
734 if (!val)
735 return false;
737 if (*length)
739 if (type > 0)
741 if (TREE_CODE (*length) != INTEGER_CST
742 || TREE_CODE (val) != INTEGER_CST)
743 return false;
745 if (tree_int_cst_lt (*length, val))
746 *length = val;
747 return true;
749 else if (simple_cst_equal (val, *length) != 1)
750 return false;
753 *length = val;
754 return true;
757 /* If ARG is registered for SSA update we cannot look at its defining
758 statement. */
759 if (name_registered_for_update_p (arg))
760 return false;
762 /* If we were already here, break the infinite cycle. */
763 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
764 return true;
766 var = arg;
767 def_stmt = SSA_NAME_DEF_STMT (var);
769 switch (gimple_code (def_stmt))
771 case GIMPLE_ASSIGN:
772 /* The RHS of the statement defining VAR must either have a
773 constant length or come from another SSA_NAME with a constant
774 length. */
775 if (gimple_assign_single_p (def_stmt)
776 || gimple_assign_unary_nop_p (def_stmt))
778 tree rhs = gimple_assign_rhs1 (def_stmt);
779 return get_maxval_strlen (rhs, length, visited, type);
781 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
783 tree op2 = gimple_assign_rhs2 (def_stmt);
784 tree op3 = gimple_assign_rhs3 (def_stmt);
785 return get_maxval_strlen (op2, length, visited, type)
786 && get_maxval_strlen (op3, length, visited, type);
788 return false;
790 case GIMPLE_PHI:
792 /* All the arguments of the PHI node must have the same constant
793 length. */
794 unsigned i;
796 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
798 tree arg = gimple_phi_arg (def_stmt, i)->def;
800 /* If this PHI has itself as an argument, we cannot
801 determine the string length of this argument. However,
802 if we can find a constant string length for the other
803 PHI args then we can still be sure that this is a
804 constant string length. So be optimistic and just
805 continue with the next argument. */
806 if (arg == gimple_phi_result (def_stmt))
807 continue;
809 if (!get_maxval_strlen (arg, length, visited, type))
810 return false;
813 return true;
815 default:
816 return false;
821 /* Fold builtin call in statement STMT. Returns a simplified tree.
822 We may return a non-constant expression, including another call
823 to a different function and with different arguments, e.g.,
824 substituting memcpy for strcpy when the string length is known.
825 Note that some builtins expand into inline code that may not
826 be valid in GIMPLE. Callers must take care. */
828 tree
829 gimple_fold_builtin (gimple stmt)
831 tree result, val[3];
832 tree callee, a;
833 int arg_idx, type;
834 bitmap visited;
835 bool ignore;
836 int nargs;
837 location_t loc = gimple_location (stmt);
839 gcc_assert (is_gimple_call (stmt));
841 ignore = (gimple_call_lhs (stmt) == NULL);
843 /* First try the generic builtin folder. If that succeeds, return the
844 result directly. */
845 result = fold_call_stmt (stmt, ignore);
846 if (result)
848 if (ignore)
849 STRIP_NOPS (result);
850 return result;
853 /* Ignore MD builtins. */
854 callee = gimple_call_fndecl (stmt);
855 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
856 return NULL_TREE;
858 /* Give up for always_inline inline builtins until they are
859 inlined. */
860 if (avoid_folding_inline_builtin (callee))
861 return NULL_TREE;
863 /* If the builtin could not be folded, and it has no argument list,
864 we're done. */
865 nargs = gimple_call_num_args (stmt);
866 if (nargs == 0)
867 return NULL_TREE;
869 /* Limit the work only for builtins we know how to simplify. */
870 switch (DECL_FUNCTION_CODE (callee))
872 case BUILT_IN_STRLEN:
873 case BUILT_IN_FPUTS:
874 case BUILT_IN_FPUTS_UNLOCKED:
875 arg_idx = 0;
876 type = 0;
877 break;
878 case BUILT_IN_STRCPY:
879 case BUILT_IN_STRNCPY:
880 arg_idx = 1;
881 type = 0;
882 break;
883 case BUILT_IN_MEMCPY_CHK:
884 case BUILT_IN_MEMPCPY_CHK:
885 case BUILT_IN_MEMMOVE_CHK:
886 case BUILT_IN_MEMSET_CHK:
887 case BUILT_IN_STRNCPY_CHK:
888 case BUILT_IN_STPNCPY_CHK:
889 arg_idx = 2;
890 type = 2;
891 break;
892 case BUILT_IN_STRCPY_CHK:
893 case BUILT_IN_STPCPY_CHK:
894 arg_idx = 1;
895 type = 1;
896 break;
897 case BUILT_IN_SNPRINTF_CHK:
898 case BUILT_IN_VSNPRINTF_CHK:
899 arg_idx = 1;
900 type = 2;
901 break;
902 default:
903 return NULL_TREE;
906 if (arg_idx >= nargs)
907 return NULL_TREE;
909 /* Try to use the dataflow information gathered by the CCP process. */
910 visited = BITMAP_ALLOC (NULL);
911 bitmap_clear (visited);
913 memset (val, 0, sizeof (val));
914 a = gimple_call_arg (stmt, arg_idx);
915 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
916 val[arg_idx] = NULL_TREE;
918 BITMAP_FREE (visited);
920 result = NULL_TREE;
921 switch (DECL_FUNCTION_CODE (callee))
923 case BUILT_IN_STRLEN:
924 if (val[0] && nargs == 1)
926 tree new_val =
927 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
929 /* If the result is not a valid gimple value, or not a cast
930 of a valid gimple value, then we cannot use the result. */
931 if (is_gimple_val (new_val)
932 || (CONVERT_EXPR_P (new_val)
933 && is_gimple_val (TREE_OPERAND (new_val, 0))))
934 return new_val;
936 break;
938 case BUILT_IN_STRCPY:
939 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
940 result = fold_builtin_strcpy (loc, callee,
941 gimple_call_arg (stmt, 0),
942 gimple_call_arg (stmt, 1),
943 val[1]);
944 break;
946 case BUILT_IN_STRNCPY:
947 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
948 result = fold_builtin_strncpy (loc, callee,
949 gimple_call_arg (stmt, 0),
950 gimple_call_arg (stmt, 1),
951 gimple_call_arg (stmt, 2),
952 val[1]);
953 break;
955 case BUILT_IN_FPUTS:
956 if (nargs == 2)
957 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
958 gimple_call_arg (stmt, 1),
959 ignore, false, val[0]);
960 break;
962 case BUILT_IN_FPUTS_UNLOCKED:
963 if (nargs == 2)
964 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
965 gimple_call_arg (stmt, 1),
966 ignore, true, val[0]);
967 break;
969 case BUILT_IN_MEMCPY_CHK:
970 case BUILT_IN_MEMPCPY_CHK:
971 case BUILT_IN_MEMMOVE_CHK:
972 case BUILT_IN_MEMSET_CHK:
973 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
974 result = fold_builtin_memory_chk (loc, callee,
975 gimple_call_arg (stmt, 0),
976 gimple_call_arg (stmt, 1),
977 gimple_call_arg (stmt, 2),
978 gimple_call_arg (stmt, 3),
979 val[2], ignore,
980 DECL_FUNCTION_CODE (callee));
981 break;
983 case BUILT_IN_STRCPY_CHK:
984 case BUILT_IN_STPCPY_CHK:
985 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
986 result = fold_builtin_stxcpy_chk (loc, callee,
987 gimple_call_arg (stmt, 0),
988 gimple_call_arg (stmt, 1),
989 gimple_call_arg (stmt, 2),
990 val[1], ignore,
991 DECL_FUNCTION_CODE (callee));
992 break;
994 case BUILT_IN_STRNCPY_CHK:
995 case BUILT_IN_STPNCPY_CHK:
996 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
997 result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
998 gimple_call_arg (stmt, 1),
999 gimple_call_arg (stmt, 2),
1000 gimple_call_arg (stmt, 3),
1001 val[2], ignore,
1002 DECL_FUNCTION_CODE (callee));
1003 break;
1005 case BUILT_IN_SNPRINTF_CHK:
1006 case BUILT_IN_VSNPRINTF_CHK:
1007 if (val[1] && is_gimple_val (val[1]))
1008 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1009 DECL_FUNCTION_CODE (callee));
1010 break;
1012 default:
1013 gcc_unreachable ();
1016 if (result && ignore)
1017 result = fold_ignored_result (result);
1018 return result;
1022 /* Return a binfo to be used for devirtualization of calls based on an object
1023 represented by a declaration (i.e. a global or automatically allocated one)
1024 or NULL if it cannot be found or is not safe. CST is expected to be an
1025 ADDR_EXPR of such object or the function will return NULL. Currently it is
1026 safe to use such binfo only if it has no base binfo (i.e. no ancestors)
1027 EXPECTED_TYPE is type of the class virtual belongs to. */
1029 tree
1030 gimple_extract_devirt_binfo_from_cst (tree cst, tree expected_type)
1032 HOST_WIDE_INT offset, size, max_size;
1033 tree base, type, binfo;
1034 bool last_artificial = false;
/* Devirtualization from a constant only makes sense when the option is
   enabled and CST is the address of an object of record type.  */
1036 if (!flag_devirtualize
1037 || TREE_CODE (cst) != ADDR_EXPR
1038 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
1039 return NULL_TREE;
1041 cst = TREE_OPERAND (cst, 0);
1042 base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
1043 type = TREE_TYPE (base);
/* Require a DECL base with known, exact extent (max_size == size, and not
   the unknown-size marker -1) so the reference covers a whole object.  */
1044 if (!DECL_P (base)
1045 || max_size == -1
1046 || max_size != size
1047 || TREE_CODE (type) != RECORD_TYPE)
1048 return NULL_TREE;
1050 /* Find the sub-object the constant actually refers to and mark whether it is
1051 an artificial one (as opposed to a user-defined one). */
1052 while (true)
1054 HOST_WIDE_INT pos, size;
1055 tree fld;
1057 if (types_same_for_odr (type, expected_type))
1058 break;
1059 if (offset < 0)
1060 return NULL_TREE;
/* Find the field whose bit range contains OFFSET, then descend into it,
   rebasing OFFSET relative to that field.  */
1062 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1064 if (TREE_CODE (fld) != FIELD_DECL)
1065 continue;
1067 pos = int_bit_position (fld);
1068 size = tree_low_cst (DECL_SIZE (fld), 1);
1069 if (pos <= offset && (pos + size) > offset)
1070 break;
1072 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
1073 return NULL_TREE;
1075 last_artificial = DECL_ARTIFICIAL (fld);
1076 type = TREE_TYPE (fld);
1077 offset -= pos;
1079 /* Artificial sub-objects are ancestors, we do not want to use them for
1080 devirtualization, at least not here. */
1081 if (last_artificial)
1082 return NULL_TREE;
/* Per the function comment, only a binfo without base binfos (no
   ancestors) is safe to return.  */
1083 binfo = TYPE_BINFO (type);
1084 if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
1085 return NULL_TREE;
1086 else
1087 return binfo;
1090 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1091 The statement may be replaced by another statement, e.g., if the call
1092 simplifies to a constant value. Return true if any changes were made.
1093 It is assumed that the operands have been previously folded. */
1095 static bool
1096 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1098 gimple stmt = gsi_stmt (*gsi);
1099 tree callee;
1100 bool changed = false;
1101 unsigned i;
1103 /* Fold *& in call arguments. */
1104 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1105 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1107 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1108 if (tmp)
1110 gimple_call_set_arg (stmt, i, tmp);
1111 changed = true;
1115 /* Check for virtual calls that became direct calls. */
1116 callee = gimple_call_fn (stmt);
1117 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1119 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
/* The OBJ_TYPE_REF already wraps a known function address.  Before
   stripping the wrapper, report to the dump file when the target is not
   among the possible polymorphic call targets, which indicates a type
   inheritance inconsistency.  */
1121 if (dump_file && virtual_method_call_p (callee)
1122 && !possible_polymorphic_call_target_p
1123 (callee, cgraph_get_node (gimple_call_addr_fndecl
1124 (OBJ_TYPE_REF_EXPR (callee)))))
1126 fprintf (dump_file,
1127 "Type inheritance inconsistent devirtualization of ");
1128 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1129 fprintf (dump_file, " to ");
1130 print_generic_expr (dump_file, callee, TDF_SLIM);
1131 fprintf (dump_file, "\n");
1134 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1135 changed = true;
/* Otherwise try devirtualizing through the binfo of a constant object.  */
1137 else if (virtual_method_call_p (callee))
1139 tree obj = OBJ_TYPE_REF_OBJECT (callee);
1140 tree binfo = gimple_extract_devirt_binfo_from_cst
1141 (obj, obj_type_ref_class (callee));
1142 if (binfo)
1144 HOST_WIDE_INT token
1145 = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1146 tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
1147 if (fndecl)
1149 #ifdef ENABLE_CHECKING
1150 gcc_assert (possible_polymorphic_call_target_p
1151 (callee, cgraph_get_node (fndecl)));
1153 #endif
1154 gimple_call_set_fndecl (stmt, fndecl);
1155 changed = true;
/* When folding in place, only the argument/callee updates above are
   allowed; the statement itself must not be replaced.  */
1161 if (inplace)
1162 return changed;
1164 /* Check for builtins that CCP can handle using information not
1165 available in the generic fold routines. */
1166 callee = gimple_call_fndecl (stmt);
1167 if (callee && DECL_BUILT_IN (callee))
1169 tree result = gimple_fold_builtin (stmt);
1170 if (result)
1172 if (!update_call_from_tree (gsi, result))
1173 gimplify_and_update_call_from_tree (gsi, result);
1174 changed = true;
1176 else if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
1177 changed |= targetm.gimple_fold_builtin (gsi);
1180 return changed;
1183 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1184 distinguishes both cases. */
1186 static bool
1187 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1189 bool changed = false;
1190 gimple stmt = gsi_stmt (*gsi);
1191 unsigned i;
1193 /* Fold the main computation performed by the statement. */
1194 switch (gimple_code (stmt))
1196 case GIMPLE_ASSIGN:
1198 unsigned old_num_ops = gimple_num_ops (stmt);
1199 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1200 tree lhs = gimple_assign_lhs (stmt);
1201 tree new_rhs;
1202 /* First canonicalize operand order. This avoids building new
1203 trees if this is the only thing fold would later do. */
1204 if ((commutative_tree_code (subcode)
1205 || commutative_ternary_tree_code (subcode))
1206 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1207 gimple_assign_rhs2 (stmt), false))
1209 tree tem = gimple_assign_rhs1 (stmt);
1210 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1211 gimple_assign_set_rhs2 (stmt, tem);
1212 changed = true;
1214 new_rhs = fold_gimple_assign (gsi);
1215 if (new_rhs
1216 && !useless_type_conversion_p (TREE_TYPE (lhs),
1217 TREE_TYPE (new_rhs)))
1218 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
/* In-place folding may only substitute the new RHS when it does not
   need more operand slots than the statement already has.  */
1219 if (new_rhs
1220 && (!inplace
1221 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1223 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1224 changed = true;
1226 break;
1229 case GIMPLE_COND:
1230 changed |= fold_gimple_cond (stmt);
1231 break;
1233 case GIMPLE_CALL:
1234 changed |= gimple_fold_call (gsi, inplace);
1235 break;
1237 case GIMPLE_ASM:
1238 /* Fold *& in asm operands. */
1240 size_t noutputs;
1241 const char **oconstraints;
1242 const char *constraint;
1243 bool allows_mem, allows_reg;
1245 noutputs = gimple_asm_noutputs (stmt);
1246 oconstraints = XALLOCAVEC (const char *, noutputs);
1248 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1250 tree link = gimple_asm_output_op (stmt, i);
1251 tree op = TREE_VALUE (link);
1252 oconstraints[i]
1253 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1254 if (REFERENCE_CLASS_P (op)
1255 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1257 TREE_VALUE (link) = op;
1258 changed = true;
1261 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1263 tree link = gimple_asm_input_op (stmt, i);
1264 tree op = TREE_VALUE (link);
1265 constraint
1266 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
/* An input folds as an lvalue only when its constraint allows memory
   but not a register.  */
1267 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1268 oconstraints, &allows_mem, &allows_reg);
1269 if (REFERENCE_CLASS_P (op)
1270 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1271 != NULL_TREE)
1273 TREE_VALUE (link) = op;
1274 changed = true;
1278 break;
1280 case GIMPLE_DEBUG:
1281 if (gimple_debug_bind_p (stmt))
1283 tree val = gimple_debug_bind_get_value (stmt);
1284 if (val
1285 && REFERENCE_CLASS_P (val))
1287 tree tem = maybe_fold_reference (val, false);
1288 if (tem)
1290 gimple_debug_bind_set_value (stmt, tem);
1291 changed = true;
1294 else if (val
1295 && TREE_CODE (val) == ADDR_EXPR)
1297 tree ref = TREE_OPERAND (val, 0);
1298 tree tem = maybe_fold_reference (ref, false);
1299 if (tem)
1301 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
1302 gimple_debug_bind_set_value (stmt, tem);
1303 changed = true;
1307 break;
1309 default:;
/* The folding above may have replaced the statement; re-fetch it.  */
1312 stmt = gsi_stmt (*gsi);
1314 /* Fold *& on the lhs. */
1315 if (gimple_has_lhs (stmt))
1317 tree lhs = gimple_get_lhs (stmt);
1318 if (lhs && REFERENCE_CLASS_P (lhs))
1320 tree new_lhs = maybe_fold_reference (lhs, true);
1321 if (new_lhs)
1323 gimple_set_lhs (stmt, new_lhs);
1324 changed = true;
1329 return changed;
1332 /* Fold the statement pointed to by GSI. In some cases, this function may
1333 replace the whole statement with a new one. Returns true iff folding
1334 makes any changes.
1335 The statement pointed to by GSI should be in valid gimple form but may
1336 be in unfolded state as resulting from for example constant propagation
1337 which can produce *&x = 0. */
1339 bool
1340 fold_stmt (gimple_stmt_iterator *gsi)
/* INPLACE == false: the worker is allowed to replace the statement.  */
1342 return fold_stmt_1 (gsi, false);
1345 /* Perform the minimal folding on statement *GSI. Only operations like
1346 *&x created by constant propagation are handled. The statement cannot
1347 be replaced with a new one. Return true if the statement was
1348 changed, false otherwise.
1349 The statement *GSI should be in valid gimple form but may
1350 be in unfolded state as resulting from for example constant propagation
1351 which can produce *&x = 0. */
1353 bool
1354 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1356 gimple stmt = gsi_stmt (*gsi);
1357 bool changed = fold_stmt_1 (gsi, true);
/* INPLACE == true promises the statement object is never replaced;
   verify the iterator still points at the same statement.  */
1358 gcc_assert (gsi_stmt (*gsi) == stmt);
1359 return changed;
1362 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1363 if EXPR is null or we don't know how.
1364 If non-null, the result always has boolean type. */
1366 static tree
1367 canonicalize_bool (tree expr, bool invert)
1369 if (!expr)
1370 return NULL_TREE;
1371 else if (invert)
/* Inverted: constants flip, an SSA name becomes NAME == 0, and a
   comparison gets its code inverted.  */
1373 if (integer_nonzerop (expr))
1374 return boolean_false_node;
1375 else if (integer_zerop (expr))
1376 return boolean_true_node;
1377 else if (TREE_CODE (expr) == SSA_NAME)
1378 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1379 build_int_cst (TREE_TYPE (expr), 0));
1380 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1381 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1382 boolean_type_node,
1383 TREE_OPERAND (expr, 0),
1384 TREE_OPERAND (expr, 1));
1385 else
1386 return NULL_TREE;
1388 else
/* Non-inverted: already-boolean expressions pass through unchanged;
   constants map to the boolean nodes; an SSA name becomes NAME != 0.  */
1390 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1391 return expr;
1392 if (integer_nonzerop (expr))
1393 return boolean_true_node;
1394 else if (integer_zerop (expr))
1395 return boolean_false_node;
1396 else if (TREE_CODE (expr) == SSA_NAME)
1397 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1398 build_int_cst (TREE_TYPE (expr), 0));
1399 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1400 return fold_build2 (TREE_CODE (expr),
1401 boolean_type_node,
1402 TREE_OPERAND (expr, 0),
1403 TREE_OPERAND (expr, 1));
1404 else
1405 return NULL_TREE;
1409 /* Check to see if a boolean expression EXPR is logically equivalent to the
1410 comparison (OP1 CODE OP2). Check for various identities involving
1411 SSA_NAMEs. */
1413 static bool
1414 same_bool_comparison_p (const_tree expr, enum tree_code code,
1415 const_tree op1, const_tree op2)
1417 gimple s;
1419 /* The obvious case. */
1420 if (TREE_CODE (expr) == code
1421 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1422 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1423 return true;
1425 /* Check for comparing (name, name != 0) and the case where expr
1426 is an SSA_NAME with a definition matching the comparison. */
1427 if (TREE_CODE (expr) == SSA_NAME
1428 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
/* A boolean NAME is equivalent to NAME != 0 and to NAME == 1.  */
1430 if (operand_equal_p (expr, op1, 0))
1431 return ((code == NE_EXPR && integer_zerop (op2))
1432 || (code == EQ_EXPR && integer_nonzerop (op2)));
/* Otherwise look through EXPR's defining assignment.  */
1433 s = SSA_NAME_DEF_STMT (expr);
1434 if (is_gimple_assign (s)
1435 && gimple_assign_rhs_code (s) == code
1436 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1437 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1438 return true;
1441 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1442 of name is a comparison, recurse. */
1443 if (TREE_CODE (op1) == SSA_NAME
1444 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1446 s = SSA_NAME_DEF_STMT (op1);
1447 if (is_gimple_assign (s)
1448 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1450 enum tree_code c = gimple_assign_rhs_code (s);
/* (name != 0) / (name == 1) keeps the comparison; (name == 0) /
   (name != 1) inverts it before recursing.  */
1451 if ((c == NE_EXPR && integer_zerop (op2))
1452 || (c == EQ_EXPR && integer_nonzerop (op2)))
1453 return same_bool_comparison_p (expr, c,
1454 gimple_assign_rhs1 (s),
1455 gimple_assign_rhs2 (s));
1456 if ((c == EQ_EXPR && integer_zerop (op2))
1457 || (c == NE_EXPR && integer_nonzerop (op2)))
1458 return same_bool_comparison_p (expr,
1459 invert_tree_comparison (c, false),
1460 gimple_assign_rhs1 (s),
1461 gimple_assign_rhs2 (s));
1464 return false;
1467 /* Check to see if two boolean expressions OP1 and OP2 are logically
1468 equivalent. */
1470 static bool
1471 same_bool_result_p (const_tree op1, const_tree op2)
1473 /* Simple cases first. */
1474 if (operand_equal_p (op1, op2, 0))
1475 return true;
1477 /* Check the cases where at least one of the operands is a comparison.
1478 These are a bit smarter than operand_equal_p in that they apply some
1479 identities on SSA_NAMEs. */
1480 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1481 && same_bool_comparison_p (op1, TREE_CODE (op2),
1482 TREE_OPERAND (op2, 0),
1483 TREE_OPERAND (op2, 1)))
1484 return true;
/* Symmetric check with the roles of OP1 and OP2 swapped.  */
1485 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1486 && same_bool_comparison_p (op2, TREE_CODE (op1),
1487 TREE_OPERAND (op1, 0),
1488 TREE_OPERAND (op1, 1)))
1489 return true;
1491 /* Default case. */
1492 return false;
1495 /* Forward declarations for some mutually recursive functions. */
1497 static tree
1498 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1499 enum tree_code code2, tree op2a, tree op2b);
1500 static tree
1501 and_var_with_comparison (tree var, bool invert,
1502 enum tree_code code2, tree op2a, tree op2b);
1503 static tree
1504 and_var_with_comparison_1 (gimple stmt,
1505 enum tree_code code2, tree op2a, tree op2b);
1506 static tree
1507 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1508 enum tree_code code2, tree op2a, tree op2b);
1509 static tree
1510 or_var_with_comparison (tree var, bool invert,
1511 enum tree_code code2, tree op2a, tree op2b);
1512 static tree
1513 or_var_with_comparison_1 (gimple stmt,
1514 enum tree_code code2, tree op2a, tree op2b);
1516 /* Helper function for and_comparisons_1: try to simplify the AND of the
1517 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1518 If INVERT is true, invert the value of the VAR before doing the AND.
1519 Return NULL_TREE if we can't simplify this to a single expression. */
1521 static tree
1522 and_var_with_comparison (tree var, bool invert,
1523 enum tree_code code2, tree op2a, tree op2b)
1525 tree t;
1526 gimple stmt = SSA_NAME_DEF_STMT (var);
1528 /* We can only deal with variables whose definitions are assignments. */
1529 if (!is_gimple_assign (stmt))
1530 return NULL_TREE;
1532 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1533 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1534 Then we only have to consider the simpler non-inverted cases. */
1535 if (invert)
1536 t = or_var_with_comparison_1 (stmt,
1537 invert_tree_comparison (code2, false),
1538 op2a, op2b);
1539 else
1540 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* canonicalize_bool also applies the outer negation when INVERT.  */
1541 return canonicalize_bool (t, invert);
1544 /* Try to simplify the AND of the ssa variable defined by the assignment
1545 STMT with the comparison specified by (OP2A CODE2 OP2B).
1546 Return NULL_TREE if we can't simplify this to a single expression. */
1548 static tree
1549 and_var_with_comparison_1 (gimple stmt,
1550 enum tree_code code2, tree op2a, tree op2b)
1552 tree var = gimple_assign_lhs (stmt);
1553 tree true_test_var = NULL_TREE;
1554 tree false_test_var = NULL_TREE;
1555 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1557 /* Check for identities like (var AND (var == 0)) => false. */
1558 if (TREE_CODE (op2a) == SSA_NAME
1559 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* TRUE_TEST_VAR / FALSE_TEST_VAR record whether the second comparison
   is a test for op2a being true resp. false; they are reused below when
   reassociating.  */
1561 if ((code2 == NE_EXPR && integer_zerop (op2b))
1562 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1564 true_test_var = op2a;
1565 if (var == true_test_var)
1566 return var;
1568 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1569 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1571 false_test_var = op2a;
1572 if (var == false_test_var)
1573 return boolean_false_node;
1577 /* If the definition is a comparison, recurse on it. */
1578 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1580 tree t = and_comparisons_1 (innercode,
1581 gimple_assign_rhs1 (stmt),
1582 gimple_assign_rhs2 (stmt),
1583 code2,
1584 op2a,
1585 op2b);
1586 if (t)
1587 return t;
1590 /* If the definition is an AND or OR expression, we may be able to
1591 simplify by reassociating. */
1592 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1593 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1595 tree inner1 = gimple_assign_rhs1 (stmt);
1596 tree inner2 = gimple_assign_rhs2 (stmt);
1597 gimple s;
1598 tree t;
1599 tree partial = NULL_TREE;
1600 bool is_and = (innercode == BIT_AND_EXPR);
1602 /* Check for boolean identities that don't require recursive examination
1603 of inner1/inner2:
1604 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1605 inner1 AND (inner1 OR inner2) => inner1
1606 !inner1 AND (inner1 AND inner2) => false
1607 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1608 Likewise for similar cases involving inner2. */
1609 if (inner1 == true_test_var)
1610 return (is_and ? var : inner1);
1611 else if (inner2 == true_test_var)
1612 return (is_and ? var : inner2);
1613 else if (inner1 == false_test_var)
1614 return (is_and
1615 ? boolean_false_node
1616 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1617 else if (inner2 == false_test_var)
1618 return (is_and
1619 ? boolean_false_node
1620 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1622 /* Next, redistribute/reassociate the AND across the inner tests.
1623 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1624 if (TREE_CODE (inner1) == SSA_NAME
1625 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1626 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1627 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1628 gimple_assign_rhs1 (s),
1629 gimple_assign_rhs2 (s),
1630 code2, op2a, op2b)))
1632 /* Handle the AND case, where we are reassociating:
1633 (inner1 AND inner2) AND (op2a code2 op2b)
1634 => (t AND inner2)
1635 If the partial result t is a constant, we win. Otherwise
1636 continue on to try reassociating with the other inner test. */
1637 if (is_and)
1639 if (integer_onep (t))
1640 return inner2;
1641 else if (integer_zerop (t))
1642 return boolean_false_node;
1645 /* Handle the OR case, where we are redistributing:
1646 (inner1 OR inner2) AND (op2a code2 op2b)
1647 => (t OR (inner2 AND (op2a code2 op2b))) */
1648 else if (integer_onep (t))
1649 return boolean_true_node;
1651 /* Save partial result for later. */
1652 partial = t;
1655 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1656 if (TREE_CODE (inner2) == SSA_NAME
1657 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1658 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1659 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1660 gimple_assign_rhs1 (s),
1661 gimple_assign_rhs2 (s),
1662 code2, op2a, op2b)))
1664 /* Handle the AND case, where we are reassociating:
1665 (inner1 AND inner2) AND (op2a code2 op2b)
1666 => (inner1 AND t) */
1667 if (is_and)
1669 if (integer_onep (t))
1670 return inner1;
1671 else if (integer_zerop (t))
1672 return boolean_false_node;
1673 /* If both are the same, we can apply the identity
1674 (x AND x) == x. */
1675 else if (partial && same_bool_result_p (t, partial))
1676 return t;
1679 /* Handle the OR case, where we are redistributing:
1680 (inner1 OR inner2) AND (op2a code2 op2b)
1681 => (t OR (inner1 AND (op2a code2 op2b)))
1682 => (t OR partial) */
1683 else
1685 if (integer_onep (t))
1686 return boolean_true_node;
1687 else if (partial)
1689 /* We already got a simplification for the other
1690 operand to the redistributed OR expression. The
1691 interesting case is when at least one is false.
1692 Or, if both are the same, we can apply the identity
1693 (x OR x) == x. */
1694 if (integer_zerop (partial))
1695 return t;
1696 else if (integer_zerop (t))
1697 return partial;
1698 else if (same_bool_result_p (t, partial))
1699 return t;
1704 return NULL_TREE;
1707 /* Try to simplify the AND of two comparisons defined by
1708 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1709 If this can be done without constructing an intermediate value,
1710 return the resulting tree; otherwise NULL_TREE is returned.
1711 This function is deliberately asymmetric as it recurses on SSA_DEFs
1712 in the first comparison but not the second. */
1714 static tree
1715 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1716 enum tree_code code2, tree op2a, tree op2b)
1718 tree truth_type = truth_type_for (TREE_TYPE (op1a));
1720 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1721 if (operand_equal_p (op1a, op2a, 0)
1722 && operand_equal_p (op1b, op2b, 0))
1724 /* Result will be either NULL_TREE, or a combined comparison. */
1725 tree t = combine_comparisons (UNKNOWN_LOCATION,
1726 TRUTH_ANDIF_EXPR, code1, code2,
1727 truth_type, op1a, op1b);
1728 if (t)
1729 return t;
1732 /* Likewise the swapped case of the above. */
1733 if (operand_equal_p (op1a, op2b, 0)
1734 && operand_equal_p (op1b, op2a, 0))
1736 /* Result will be either NULL_TREE, or a combined comparison. */
1737 tree t = combine_comparisons (UNKNOWN_LOCATION,
1738 TRUTH_ANDIF_EXPR, code1,
1739 swap_tree_comparison (code2),
1740 truth_type, op1a, op1b);
1741 if (t)
1742 return t;
1745 /* If both comparisons are of the same value against constants, we might
1746 be able to merge them. */
1747 if (operand_equal_p (op1a, op2a, 0)
1748 && TREE_CODE (op1b) == INTEGER_CST
1749 && TREE_CODE (op2b) == INTEGER_CST)
/* CMP is <0, 0 or >0 according to op1b vs op2b; all merging decisions
   below are keyed off it.  */
1751 int cmp = tree_int_cst_compare (op1b, op2b);
1753 /* If we have (op1a == op1b), we should either be able to
1754 return that or FALSE, depending on whether the constant op1b
1755 also satisfies the other comparison against op2b. */
1756 if (code1 == EQ_EXPR)
1758 bool done = true;
1759 bool val;
1760 switch (code2)
1762 case EQ_EXPR: val = (cmp == 0); break;
1763 case NE_EXPR: val = (cmp != 0); break;
1764 case LT_EXPR: val = (cmp < 0); break;
1765 case GT_EXPR: val = (cmp > 0); break;
1766 case LE_EXPR: val = (cmp <= 0); break;
1767 case GE_EXPR: val = (cmp >= 0); break;
1768 default: done = false;
1770 if (done)
1772 if (val)
1773 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1774 else
1775 return boolean_false_node;
1778 /* Likewise if the second comparison is an == comparison. */
1779 else if (code2 == EQ_EXPR)
1781 bool done = true;
1782 bool val;
1783 switch (code1)
1785 case EQ_EXPR: val = (cmp == 0); break;
1786 case NE_EXPR: val = (cmp != 0); break;
1787 case LT_EXPR: val = (cmp > 0); break;
1788 case GT_EXPR: val = (cmp < 0); break;
1789 case LE_EXPR: val = (cmp >= 0); break;
1790 case GE_EXPR: val = (cmp <= 0); break;
1791 default: done = false;
1793 if (done)
1795 if (val)
1796 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1797 else
1798 return boolean_false_node;
1802 /* Same business with inequality tests. */
1803 else if (code1 == NE_EXPR)
1805 bool val;
1806 switch (code2)
1808 case EQ_EXPR: val = (cmp != 0); break;
1809 case NE_EXPR: val = (cmp == 0); break;
1810 case LT_EXPR: val = (cmp >= 0); break;
1811 case GT_EXPR: val = (cmp <= 0); break;
1812 case LE_EXPR: val = (cmp > 0); break;
1813 case GE_EXPR: val = (cmp < 0); break;
1814 default:
1815 val = false;
/* When VAL, the second comparison implies the first, so it alone is
   the result of the AND.  */
1817 if (val)
1818 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1820 else if (code2 == NE_EXPR)
1822 bool val;
1823 switch (code1)
1825 case EQ_EXPR: val = (cmp == 0); break;
1826 case NE_EXPR: val = (cmp != 0); break;
1827 case LT_EXPR: val = (cmp <= 0); break;
1828 case GT_EXPR: val = (cmp >= 0); break;
1829 case LE_EXPR: val = (cmp < 0); break;
1830 case GE_EXPR: val = (cmp > 0); break;
1831 default:
1832 val = false;
1834 if (val)
1835 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1838 /* Choose the more restrictive of two < or <= comparisons. */
1839 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1840 && (code2 == LT_EXPR || code2 == LE_EXPR))
1842 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1843 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1844 else
1845 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1848 /* Likewise choose the more restrictive of two > or >= comparisons. */
1849 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1850 && (code2 == GT_EXPR || code2 == GE_EXPR))
1852 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1853 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1854 else
1855 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1858 /* Check for singleton ranges. */
1859 else if (cmp == 0
1860 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1861 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1862 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1864 /* Check for disjoint ranges. */
1865 else if (cmp <= 0
1866 && (code1 == LT_EXPR || code1 == LE_EXPR)
1867 && (code2 == GT_EXPR || code2 == GE_EXPR))
1868 return boolean_false_node;
1869 else if (cmp >= 0
1870 && (code1 == GT_EXPR || code1 == GE_EXPR)
1871 && (code2 == LT_EXPR || code2 == LE_EXPR))
1872 return boolean_false_node;
1875 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1876 NAME's definition is a truth value. See if there are any simplifications
1877 that can be done against the NAME's definition. */
1878 if (TREE_CODE (op1a) == SSA_NAME
1879 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1880 && (integer_zerop (op1b) || integer_onep (op1b)))
1882 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1883 || (code1 == NE_EXPR && integer_onep (op1b)));
1884 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1885 switch (gimple_code (stmt))
1887 case GIMPLE_ASSIGN:
1888 /* Try to simplify by copy-propagating the definition. */
1889 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1891 case GIMPLE_PHI:
1892 /* If every argument to the PHI produces the same result when
1893 ANDed with the second comparison, we win.
1894 Do not do this unless the type is bool since we need a bool
1895 result here anyway. */
1896 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1898 tree result = NULL_TREE;
1899 unsigned i;
1900 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1902 tree arg = gimple_phi_arg_def (stmt, i);
1904 /* If this PHI has itself as an argument, ignore it.
1905 If all the other args produce the same result,
1906 we're still OK. */
1907 if (arg == gimple_phi_result (stmt))
1908 continue;
1909 else if (TREE_CODE (arg) == INTEGER_CST)
1911 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1913 if (!result)
1914 result = boolean_false_node;
1915 else if (!integer_zerop (result))
1916 return NULL_TREE;
1918 else if (!result)
1919 result = fold_build2 (code2, boolean_type_node,
1920 op2a, op2b);
1921 else if (!same_bool_comparison_p (result,
1922 code2, op2a, op2b))
1923 return NULL_TREE;
1925 else if (TREE_CODE (arg) == SSA_NAME
1926 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1928 tree temp;
1929 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1930 /* In simple cases we can look through PHI nodes,
1931 but we have to be careful with loops.
1932 See PR49073. */
1933 if (! dom_info_available_p (CDI_DOMINATORS)
1934 || gimple_bb (def_stmt) == gimple_bb (stmt)
1935 || dominated_by_p (CDI_DOMINATORS,
1936 gimple_bb (def_stmt),
1937 gimple_bb (stmt)))
1938 return NULL_TREE;
1939 temp = and_var_with_comparison (arg, invert, code2,
1940 op2a, op2b);
1941 if (!temp)
1942 return NULL_TREE;
1943 else if (!result)
1944 result = temp;
1945 else if (!same_bool_result_p (result, temp))
1946 return NULL_TREE;
1948 else
1949 return NULL_TREE;
1951 return result;
1954 default:
1955 break;
1958 return NULL_TREE;
1961 /* Try to simplify the AND of two comparisons, specified by
1962 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1963 If this can be simplified to a single expression (without requiring
1964 introducing more SSA variables to hold intermediate values),
1965 return the resulting tree. Otherwise return NULL_TREE.
1966 If the result expression is non-null, it has boolean type. */
1968 tree
1969 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1970 enum tree_code code2, tree op2a, tree op2b)
/* and_comparisons_1 is asymmetric (it only recurses on SSA defs of the
   first comparison), so try both argument orders.  */
1972 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1973 if (t)
1974 return t;
1975 else
1976 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1979 /* Helper function for or_comparisons_1: try to simplify the OR of the
1980 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1981 If INVERT is true, invert the value of VAR before doing the OR.
1982 Return NULL_TREE if we can't simplify this to a single expression. */
1984 static tree
1985 or_var_with_comparison (tree var, bool invert,
1986 enum tree_code code2, tree op2a, tree op2b)
1988 tree t;
1989 gimple stmt = SSA_NAME_DEF_STMT (var);
1991 /* We can only deal with variables whose definitions are assignments. */
1992 if (!is_gimple_assign (stmt))
1993 return NULL_TREE;
1995 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1996 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1997 Then we only have to consider the simpler non-inverted cases. */
1998 if (invert)
1999 t = and_var_with_comparison_1 (stmt,
2000 invert_tree_comparison (code2, false),
2001 op2a, op2b);
2002 else
2003 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* canonicalize_bool also applies the outer negation when INVERT.  */
2004 return canonicalize_bool (t, invert);
2007 /* Try to simplify the OR of the ssa variable defined by the assignment
2008 STMT with the comparison specified by (OP2A CODE2 OP2B).
2009 Return NULL_TREE if we can't simplify this to a single expression. */
2011 static tree
2012 or_var_with_comparison_1 (gimple stmt,
2013 enum tree_code code2, tree op2a, tree op2b)
2015 tree var = gimple_assign_lhs (stmt);
2016 tree true_test_var = NULL_TREE;
2017 tree false_test_var = NULL_TREE;
2018 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2020 /* Check for identities like (var OR (var != 0)) => true . */
2021 if (TREE_CODE (op2a) == SSA_NAME
2022 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* TRUE_TEST_VAR / FALSE_TEST_VAR record whether the second comparison
   is a test for op2a being true resp. false; they are reused below when
   reassociating.  */
2024 if ((code2 == NE_EXPR && integer_zerop (op2b))
2025 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2027 true_test_var = op2a;
2028 if (var == true_test_var)
2029 return var;
2031 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2032 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2034 false_test_var = op2a;
2035 if (var == false_test_var)
2036 return boolean_true_node;
2040 /* If the definition is a comparison, recurse on it. */
2041 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2043 tree t = or_comparisons_1 (innercode,
2044 gimple_assign_rhs1 (stmt),
2045 gimple_assign_rhs2 (stmt),
2046 code2,
2047 op2a,
2048 op2b);
2049 if (t)
2050 return t;
2053 /* If the definition is an AND or OR expression, we may be able to
2054 simplify by reassociating. */
2055 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2056 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2058 tree inner1 = gimple_assign_rhs1 (stmt);
2059 tree inner2 = gimple_assign_rhs2 (stmt);
2060 gimple s;
2061 tree t;
2062 tree partial = NULL_TREE;
2063 bool is_or = (innercode == BIT_IOR_EXPR);
2065 /* Check for boolean identities that don't require recursive examination
2066 of inner1/inner2:
2067 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2068 inner1 OR (inner1 AND inner2) => inner1
2069 !inner1 OR (inner1 OR inner2) => true
2070 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2072 if (inner1 == true_test_var)
2073 return (is_or ? var : inner1);
2074 else if (inner2 == true_test_var)
2075 return (is_or ? var : inner2);
2076 else if (inner1 == false_test_var)
2077 return (is_or
2078 ? boolean_true_node
2079 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2080 else if (inner2 == false_test_var)
2081 return (is_or
2082 ? boolean_true_node
2083 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2085 /* Next, redistribute/reassociate the OR across the inner tests.
2086 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2087 if (TREE_CODE (inner1) == SSA_NAME
2088 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2089 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2090 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2091 gimple_assign_rhs1 (s),
2092 gimple_assign_rhs2 (s),
2093 code2, op2a, op2b)))
2095 /* Handle the OR case, where we are reassociating:
2096 (inner1 OR inner2) OR (op2a code2 op2b)
2097 => (t OR inner2)
2098 If the partial result t is a constant, we win. Otherwise
2099 continue on to try reassociating with the other inner test. */
2100 if (is_or)
2102 if (integer_onep (t))
2103 return boolean_true_node;
2104 else if (integer_zerop (t))
2105 return inner2;
2108 /* Handle the AND case, where we are redistributing:
2109 (inner1 AND inner2) OR (op2a code2 op2b)
2110 => (t AND (inner2 OR (op2a code op2b))) */
2111 else if (integer_zerop (t))
2112 return boolean_false_node;
2114 /* Save partial result for later. */
2115 partial = t;
2118 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2119 if (TREE_CODE (inner2) == SSA_NAME
2120 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2121 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2122 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2123 gimple_assign_rhs1 (s),
2124 gimple_assign_rhs2 (s),
2125 code2, op2a, op2b)))
2127 /* Handle the OR case, where we are reassociating:
2128 (inner1 OR inner2) OR (op2a code2 op2b)
2129 => (inner1 OR t)
2130 => (t OR partial) */
2131 if (is_or)
2133 if (integer_zerop (t))
2134 return inner1;
2135 else if (integer_onep (t))
2136 return boolean_true_node;
2137 /* If both are the same, we can apply the identity
2138 (x OR x) == x. */
2139 else if (partial && same_bool_result_p (t, partial))
2140 return t;
2143 /* Handle the AND case, where we are redistributing:
2144 (inner1 AND inner2) OR (op2a code2 op2b)
2145 => (t AND (inner1 OR (op2a code2 op2b)))
2146 => (t AND partial) */
2147 else
2149 if (integer_zerop (t))
2150 return boolean_false_node;
2151 else if (partial)
2153 /* We already got a simplification for the other
2154 operand to the redistributed AND expression. The
2155 interesting case is when at least one is true.
2156 Or, if both are the same, we can apply the identity
2157 (x AND x) == x. */
2158 if (integer_onep (partial))
2159 return t;
2160 else if (integer_onep (t))
2161 return partial;
2162 else if (same_bool_result_p (t, partial))
2163 return t;
2168 return NULL_TREE;
2171 /* Try to simplify the OR of two comparisons defined by
2172 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2173 If this can be done without constructing an intermediate value,
2174 return the resulting tree; otherwise NULL_TREE is returned.
2175 This function is deliberately asymmetric as it recurses on SSA_DEFs
2176 in the first comparison but not the second. */
2178 static tree
2179 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2180 enum tree_code code2, tree op2a, tree op2b)
2182 tree truth_type = truth_type_for (TREE_TYPE (op1a));
2184 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2185 if (operand_equal_p (op1a, op2a, 0)
2186 && operand_equal_p (op1b, op2b, 0))
2188 /* Result will be either NULL_TREE, or a combined comparison. */
2189 tree t = combine_comparisons (UNKNOWN_LOCATION,
2190 TRUTH_ORIF_EXPR, code1, code2,
2191 truth_type, op1a, op1b);
2192 if (t)
2193 return t;
2196 /* Likewise the swapped case of the above. */
2197 if (operand_equal_p (op1a, op2b, 0)
2198 && operand_equal_p (op1b, op2a, 0))
2200 /* Result will be either NULL_TREE, or a combined comparison. */
2201 tree t = combine_comparisons (UNKNOWN_LOCATION,
2202 TRUTH_ORIF_EXPR, code1,
2203 swap_tree_comparison (code2),
2204 truth_type, op1a, op1b);
2205 if (t)
2206 return t;
2209 /* If both comparisons are of the same value against constants, we might
2210 be able to merge them. */
2211 if (operand_equal_p (op1a, op2a, 0)
2212 && TREE_CODE (op1b) == INTEGER_CST
2213 && TREE_CODE (op2b) == INTEGER_CST)
2215 int cmp = tree_int_cst_compare (op1b, op2b);
2217 /* If we have (op1a != op1b), we should either be able to
2218 return that or TRUE, depending on whether the constant op1b
2219 also satisfies the other comparison against op2b. */
2220 if (code1 == NE_EXPR)
2222 bool done = true;
2223 bool val;
2224 switch (code2)
2226 case EQ_EXPR: val = (cmp == 0); break;
2227 case NE_EXPR: val = (cmp != 0); break;
2228 case LT_EXPR: val = (cmp < 0); break;
2229 case GT_EXPR: val = (cmp > 0); break;
2230 case LE_EXPR: val = (cmp <= 0); break;
2231 case GE_EXPR: val = (cmp >= 0); break;
2232 default: done = false;
2234 if (done)
2236 if (val)
2237 return boolean_true_node;
2238 else
2239 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2242 /* Likewise if the second comparison is a != comparison. */
2243 else if (code2 == NE_EXPR)
2245 bool done = true;
2246 bool val;
2247 switch (code1)
2249 case EQ_EXPR: val = (cmp == 0); break;
2250 case NE_EXPR: val = (cmp != 0); break;
2251 case LT_EXPR: val = (cmp > 0); break;
2252 case GT_EXPR: val = (cmp < 0); break;
2253 case LE_EXPR: val = (cmp >= 0); break;
2254 case GE_EXPR: val = (cmp <= 0); break;
2255 default: done = false;
2257 if (done)
2259 if (val)
2260 return boolean_true_node;
2261 else
2262 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2266 /* See if an equality test is redundant with the other comparison. */
2267 else if (code1 == EQ_EXPR)
2269 bool val;
2270 switch (code2)
2272 case EQ_EXPR: val = (cmp == 0); break;
2273 case NE_EXPR: val = (cmp != 0); break;
2274 case LT_EXPR: val = (cmp < 0); break;
2275 case GT_EXPR: val = (cmp > 0); break;
2276 case LE_EXPR: val = (cmp <= 0); break;
2277 case GE_EXPR: val = (cmp >= 0); break;
2278 default:
2279 val = false;
2281 if (val)
2282 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2284 else if (code2 == EQ_EXPR)
2286 bool val;
2287 switch (code1)
2289 case EQ_EXPR: val = (cmp == 0); break;
2290 case NE_EXPR: val = (cmp != 0); break;
2291 case LT_EXPR: val = (cmp > 0); break;
2292 case GT_EXPR: val = (cmp < 0); break;
2293 case LE_EXPR: val = (cmp >= 0); break;
2294 case GE_EXPR: val = (cmp <= 0); break;
2295 default:
2296 val = false;
2298 if (val)
2299 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2302 /* Chose the less restrictive of two < or <= comparisons. */
2303 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2304 && (code2 == LT_EXPR || code2 == LE_EXPR))
2306 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2307 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2308 else
2309 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2312 /* Likewise chose the less restrictive of two > or >= comparisons. */
2313 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2314 && (code2 == GT_EXPR || code2 == GE_EXPR))
2316 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2317 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2318 else
2319 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2322 /* Check for singleton ranges. */
2323 else if (cmp == 0
2324 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2325 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2326 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2328 /* Check for less/greater pairs that don't restrict the range at all. */
2329 else if (cmp >= 0
2330 && (code1 == LT_EXPR || code1 == LE_EXPR)
2331 && (code2 == GT_EXPR || code2 == GE_EXPR))
2332 return boolean_true_node;
2333 else if (cmp <= 0
2334 && (code1 == GT_EXPR || code1 == GE_EXPR)
2335 && (code2 == LT_EXPR || code2 == LE_EXPR))
2336 return boolean_true_node;
2339 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2340 NAME's definition is a truth value. See if there are any simplifications
2341 that can be done against the NAME's definition. */
2342 if (TREE_CODE (op1a) == SSA_NAME
2343 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2344 && (integer_zerop (op1b) || integer_onep (op1b)))
2346 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2347 || (code1 == NE_EXPR && integer_onep (op1b)));
2348 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2349 switch (gimple_code (stmt))
2351 case GIMPLE_ASSIGN:
2352 /* Try to simplify by copy-propagating the definition. */
2353 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2355 case GIMPLE_PHI:
2356 /* If every argument to the PHI produces the same result when
2357 ORed with the second comparison, we win.
2358 Do not do this unless the type is bool since we need a bool
2359 result here anyway. */
2360 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2362 tree result = NULL_TREE;
2363 unsigned i;
2364 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2366 tree arg = gimple_phi_arg_def (stmt, i);
2368 /* If this PHI has itself as an argument, ignore it.
2369 If all the other args produce the same result,
2370 we're still OK. */
2371 if (arg == gimple_phi_result (stmt))
2372 continue;
2373 else if (TREE_CODE (arg) == INTEGER_CST)
2375 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2377 if (!result)
2378 result = boolean_true_node;
2379 else if (!integer_onep (result))
2380 return NULL_TREE;
2382 else if (!result)
2383 result = fold_build2 (code2, boolean_type_node,
2384 op2a, op2b);
2385 else if (!same_bool_comparison_p (result,
2386 code2, op2a, op2b))
2387 return NULL_TREE;
2389 else if (TREE_CODE (arg) == SSA_NAME
2390 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2392 tree temp;
2393 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2394 /* In simple cases we can look through PHI nodes,
2395 but we have to be careful with loops.
2396 See PR49073. */
2397 if (! dom_info_available_p (CDI_DOMINATORS)
2398 || gimple_bb (def_stmt) == gimple_bb (stmt)
2399 || dominated_by_p (CDI_DOMINATORS,
2400 gimple_bb (def_stmt),
2401 gimple_bb (stmt)))
2402 return NULL_TREE;
2403 temp = or_var_with_comparison (arg, invert, code2,
2404 op2a, op2b);
2405 if (!temp)
2406 return NULL_TREE;
2407 else if (!result)
2408 result = temp;
2409 else if (!same_bool_result_p (result, temp))
2410 return NULL_TREE;
2412 else
2413 return NULL_TREE;
2415 return result;
2418 default:
2419 break;
2422 return NULL_TREE;
2425 /* Try to simplify the OR of two comparisons, specified by
2426 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2427 If this can be simplified to a single expression (without requiring
2428 introducing more SSA variables to hold intermediate values),
2429 return the resulting tree. Otherwise return NULL_TREE.
2430 If the result expression is non-null, it has boolean type. */
2432 tree
2433 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2434 enum tree_code code2, tree op2a, tree op2b)
2436 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2437 if (t)
2438 return t;
2439 else
2440 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2444 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2446 Either NULL_TREE, a simplified but non-constant or a constant
2447 is returned.
2449 ??? This should go into a gimple-fold-inline.h file to be eventually
2450 privatized with the single valueize function used in the various TUs
2451 to avoid the indirect function call overhead. */
2453 tree
2454 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2456 location_t loc = gimple_location (stmt);
2457 switch (gimple_code (stmt))
2459 case GIMPLE_ASSIGN:
2461 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2463 switch (get_gimple_rhs_class (subcode))
2465 case GIMPLE_SINGLE_RHS:
2467 tree rhs = gimple_assign_rhs1 (stmt);
2468 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2470 if (TREE_CODE (rhs) == SSA_NAME)
2472 /* If the RHS is an SSA_NAME, return its known constant value,
2473 if any. */
2474 return (*valueize) (rhs);
2476 /* Handle propagating invariant addresses into address
2477 operations. */
2478 else if (TREE_CODE (rhs) == ADDR_EXPR
2479 && !is_gimple_min_invariant (rhs))
2481 HOST_WIDE_INT offset = 0;
2482 tree base;
2483 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2484 &offset,
2485 valueize);
2486 if (base
2487 && (CONSTANT_CLASS_P (base)
2488 || decl_address_invariant_p (base)))
2489 return build_invariant_address (TREE_TYPE (rhs),
2490 base, offset);
2492 else if (TREE_CODE (rhs) == CONSTRUCTOR
2493 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2494 && (CONSTRUCTOR_NELTS (rhs)
2495 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2497 unsigned i;
2498 tree val, *vec;
2500 vec = XALLOCAVEC (tree,
2501 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
2502 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2504 val = (*valueize) (val);
2505 if (TREE_CODE (val) == INTEGER_CST
2506 || TREE_CODE (val) == REAL_CST
2507 || TREE_CODE (val) == FIXED_CST)
2508 vec[i] = val;
2509 else
2510 return NULL_TREE;
2513 return build_vector (TREE_TYPE (rhs), vec);
2516 if (kind == tcc_reference)
2518 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2519 || TREE_CODE (rhs) == REALPART_EXPR
2520 || TREE_CODE (rhs) == IMAGPART_EXPR)
2521 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2523 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2524 return fold_unary_loc (EXPR_LOCATION (rhs),
2525 TREE_CODE (rhs),
2526 TREE_TYPE (rhs), val);
2528 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2529 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2531 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2532 return fold_ternary_loc (EXPR_LOCATION (rhs),
2533 TREE_CODE (rhs),
2534 TREE_TYPE (rhs), val,
2535 TREE_OPERAND (rhs, 1),
2536 TREE_OPERAND (rhs, 2));
2538 else if (TREE_CODE (rhs) == MEM_REF
2539 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2541 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2542 if (TREE_CODE (val) == ADDR_EXPR
2543 && is_gimple_min_invariant (val))
2545 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2546 unshare_expr (val),
2547 TREE_OPERAND (rhs, 1));
2548 if (tem)
2549 rhs = tem;
2552 return fold_const_aggregate_ref_1 (rhs, valueize);
2554 else if (kind == tcc_declaration)
2555 return get_symbol_constant_value (rhs);
2556 return rhs;
2559 case GIMPLE_UNARY_RHS:
2561 /* Handle unary operators that can appear in GIMPLE form.
2562 Note that we know the single operand must be a constant,
2563 so this should almost always return a simplified RHS. */
2564 tree lhs = gimple_assign_lhs (stmt);
2565 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2567 /* Conversions are useless for CCP purposes if they are
2568 value-preserving. Thus the restrictions that
2569 useless_type_conversion_p places for restrict qualification
2570 of pointer types should not apply here.
2571 Substitution later will only substitute to allowed places. */
2572 if (CONVERT_EXPR_CODE_P (subcode)
2573 && POINTER_TYPE_P (TREE_TYPE (lhs))
2574 && POINTER_TYPE_P (TREE_TYPE (op0))
2575 && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2576 == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2577 && TYPE_MODE (TREE_TYPE (lhs))
2578 == TYPE_MODE (TREE_TYPE (op0)))
2579 return op0;
2581 return
2582 fold_unary_ignore_overflow_loc (loc, subcode,
2583 gimple_expr_type (stmt), op0);
2586 case GIMPLE_BINARY_RHS:
2588 /* Handle binary operators that can appear in GIMPLE form. */
2589 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2590 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2592 /* Translate &x + CST into an invariant form suitable for
2593 further propagation. */
2594 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2595 && TREE_CODE (op0) == ADDR_EXPR
2596 && TREE_CODE (op1) == INTEGER_CST)
2598 tree off = fold_convert (ptr_type_node, op1);
2599 return build_fold_addr_expr_loc
2600 (loc,
2601 fold_build2 (MEM_REF,
2602 TREE_TYPE (TREE_TYPE (op0)),
2603 unshare_expr (op0), off));
2606 return fold_binary_loc (loc, subcode,
2607 gimple_expr_type (stmt), op0, op1);
2610 case GIMPLE_TERNARY_RHS:
2612 /* Handle ternary operators that can appear in GIMPLE form. */
2613 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2614 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2615 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2617 /* Fold embedded expressions in ternary codes. */
2618 if ((subcode == COND_EXPR
2619 || subcode == VEC_COND_EXPR)
2620 && COMPARISON_CLASS_P (op0))
2622 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2623 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2624 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2625 TREE_TYPE (op0), op00, op01);
2626 if (tem)
2627 op0 = tem;
2630 return fold_ternary_loc (loc, subcode,
2631 gimple_expr_type (stmt), op0, op1, op2);
2634 default:
2635 gcc_unreachable ();
2639 case GIMPLE_CALL:
2641 tree fn;
2643 if (gimple_call_internal_p (stmt))
2644 /* No folding yet for these functions. */
2645 return NULL_TREE;
2647 fn = (*valueize) (gimple_call_fn (stmt));
2648 if (TREE_CODE (fn) == ADDR_EXPR
2649 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2650 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2652 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2653 tree call, retval;
2654 unsigned i;
2655 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2656 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2657 call = build_call_array_loc (loc,
2658 gimple_call_return_type (stmt),
2659 fn, gimple_call_num_args (stmt), args);
2660 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2661 if (retval)
2662 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2663 STRIP_NOPS (retval);
2664 return retval;
2666 return NULL_TREE;
2669 default:
2670 return NULL_TREE;
2674 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2675 Returns NULL_TREE if folding to a constant is not possible, otherwise
2676 returns a constant according to is_gimple_min_invariant. */
2678 tree
2679 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2681 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2682 if (res && is_gimple_min_invariant (res))
2683 return res;
2684 return NULL_TREE;
2688 /* The following set of functions are supposed to fold references using
2689 their constant initializers. */
2691 static tree fold_ctor_reference (tree type, tree ctor,
2692 unsigned HOST_WIDE_INT offset,
2693 unsigned HOST_WIDE_INT size, tree);
2695 /* See if we can find constructor defining value of BASE.
2696 When we know the consructor with constant offset (such as
2697 base is array[40] and we do know constructor of array), then
2698 BIT_OFFSET is adjusted accordingly.
2700 As a special case, return error_mark_node when constructor
2701 is not explicitly available, but it is known to be zero
2702 such as 'static const int a;'. */
2703 static tree
2704 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2705 tree (*valueize)(tree))
2707 HOST_WIDE_INT bit_offset2, size, max_size;
2708 if (TREE_CODE (base) == MEM_REF)
2710 if (!integer_zerop (TREE_OPERAND (base, 1)))
2712 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2713 return NULL_TREE;
2714 *bit_offset += (mem_ref_offset (base).low
2715 * BITS_PER_UNIT);
2718 if (valueize
2719 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2720 base = valueize (TREE_OPERAND (base, 0));
2721 if (!base || TREE_CODE (base) != ADDR_EXPR)
2722 return NULL_TREE;
2723 base = TREE_OPERAND (base, 0);
2726 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2727 DECL_INITIAL. If BASE is a nested reference into another
2728 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2729 the inner reference. */
2730 switch (TREE_CODE (base))
2732 case VAR_DECL:
2733 case CONST_DECL:
2735 tree init = ctor_for_folding (base);
2737 /* Our semantic is exact opposite of ctor_for_folding;
2738 NULL means unknown, while error_mark_node is 0. */
2739 if (init == error_mark_node)
2740 return NULL_TREE;
2741 if (!init)
2742 return error_mark_node;
2743 return init;
2746 case ARRAY_REF:
2747 case COMPONENT_REF:
2748 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2749 if (max_size == -1 || size != max_size)
2750 return NULL_TREE;
2751 *bit_offset += bit_offset2;
2752 return get_base_constructor (base, bit_offset, valueize);
2754 case STRING_CST:
2755 case CONSTRUCTOR:
2756 return base;
2758 default:
2759 return NULL_TREE;
2763 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2764 to the memory at bit OFFSET.
2766 We do only simple job of folding byte accesses. */
2768 static tree
2769 fold_string_cst_ctor_reference (tree type, tree ctor,
2770 unsigned HOST_WIDE_INT offset,
2771 unsigned HOST_WIDE_INT size)
2773 if (INTEGRAL_TYPE_P (type)
2774 && (TYPE_MODE (type)
2775 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2776 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2777 == MODE_INT)
2778 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2779 && size == BITS_PER_UNIT
2780 && !(offset % BITS_PER_UNIT))
2782 offset /= BITS_PER_UNIT;
2783 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2784 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2785 [offset]));
2786 /* Folding
2787 const char a[20]="hello";
2788 return a[10];
2790 might lead to offset greater than string length. In this case we
2791 know value is either initialized to 0 or out of bounds. Return 0
2792 in both cases. */
2793 return build_zero_cst (type);
2795 return NULL_TREE;
2798 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2799 SIZE to the memory at bit OFFSET. */
2801 static tree
2802 fold_array_ctor_reference (tree type, tree ctor,
2803 unsigned HOST_WIDE_INT offset,
2804 unsigned HOST_WIDE_INT size,
2805 tree from_decl)
2807 unsigned HOST_WIDE_INT cnt;
2808 tree cfield, cval;
2809 double_int low_bound, elt_size;
2810 double_int index, max_index;
2811 double_int access_index;
2812 tree domain_type = NULL_TREE, index_type = NULL_TREE;
2813 HOST_WIDE_INT inner_offset;
2815 /* Compute low bound and elt size. */
2816 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2817 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2818 if (domain_type && TYPE_MIN_VALUE (domain_type))
2820 /* Static constructors for variably sized objects makes no sense. */
2821 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2822 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
2823 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2825 else
2826 low_bound = double_int_zero;
2827 /* Static constructors for variably sized objects makes no sense. */
2828 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2829 == INTEGER_CST);
2830 elt_size =
2831 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2834 /* We can handle only constantly sized accesses that are known to not
2835 be larger than size of array element. */
2836 if (!TYPE_SIZE_UNIT (type)
2837 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2838 || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type))))
2839 return NULL_TREE;
2841 /* Compute the array index we look for. */
2842 access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
2843 .udiv (elt_size, TRUNC_DIV_EXPR);
2844 access_index += low_bound;
2845 if (index_type)
2846 access_index = access_index.ext (TYPE_PRECISION (index_type),
2847 TYPE_UNSIGNED (index_type));
2849 /* And offset within the access. */
2850 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
2852 /* See if the array field is large enough to span whole access. We do not
2853 care to fold accesses spanning multiple array indexes. */
2854 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
2855 return NULL_TREE;
2857 index = low_bound - double_int_one;
2858 if (index_type)
2859 index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));
2861 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2863 /* Array constructor might explicitely set index, or specify range
2864 or leave index NULL meaning that it is next index after previous
2865 one. */
2866 if (cfield)
2868 if (TREE_CODE (cfield) == INTEGER_CST)
2869 max_index = index = tree_to_double_int (cfield);
2870 else
2872 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2873 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2874 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2877 else
2879 index += double_int_one;
2880 if (index_type)
2881 index = index.ext (TYPE_PRECISION (index_type),
2882 TYPE_UNSIGNED (index_type));
2883 max_index = index;
2886 /* Do we have match? */
2887 if (access_index.cmp (index, 1) >= 0
2888 && access_index.cmp (max_index, 1) <= 0)
2889 return fold_ctor_reference (type, cval, inner_offset, size,
2890 from_decl);
2892 /* When memory is not explicitely mentioned in constructor,
2893 it is 0 (or out of range). */
2894 return build_zero_cst (type);
2897 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2898 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2900 static tree
2901 fold_nonarray_ctor_reference (tree type, tree ctor,
2902 unsigned HOST_WIDE_INT offset,
2903 unsigned HOST_WIDE_INT size,
2904 tree from_decl)
2906 unsigned HOST_WIDE_INT cnt;
2907 tree cfield, cval;
2909 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2910 cval)
2912 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2913 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2914 tree field_size = DECL_SIZE (cfield);
2915 double_int bitoffset;
2916 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2917 double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
2918 double_int bitoffset_end, access_end;
2920 /* Variable sized objects in static constructors makes no sense,
2921 but field_size can be NULL for flexible array members. */
2922 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2923 && TREE_CODE (byte_offset) == INTEGER_CST
2924 && (field_size != NULL_TREE
2925 ? TREE_CODE (field_size) == INTEGER_CST
2926 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2928 /* Compute bit offset of the field. */
2929 bitoffset = tree_to_double_int (field_offset)
2930 + byte_offset_cst * bits_per_unit_cst;
2931 /* Compute bit offset where the field ends. */
2932 if (field_size != NULL_TREE)
2933 bitoffset_end = bitoffset + tree_to_double_int (field_size);
2934 else
2935 bitoffset_end = double_int_zero;
2937 access_end = double_int::from_uhwi (offset)
2938 + double_int::from_uhwi (size);
2940 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2941 [BITOFFSET, BITOFFSET_END)? */
2942 if (access_end.cmp (bitoffset, 0) > 0
2943 && (field_size == NULL_TREE
2944 || double_int::from_uhwi (offset).slt (bitoffset_end)))
2946 double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
2947 /* We do have overlap. Now see if field is large enough to
2948 cover the access. Give up for accesses spanning multiple
2949 fields. */
2950 if (access_end.cmp (bitoffset_end, 0) > 0)
2951 return NULL_TREE;
2952 if (double_int::from_uhwi (offset).slt (bitoffset))
2953 return NULL_TREE;
2954 return fold_ctor_reference (type, cval,
2955 inner_offset.to_uhwi (), size,
2956 from_decl);
2959 /* When memory is not explicitely mentioned in constructor, it is 0. */
2960 return build_zero_cst (type);
2963 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2964 to the memory at bit OFFSET. */
2966 static tree
2967 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2968 unsigned HOST_WIDE_INT size, tree from_decl)
2970 tree ret;
2972 /* We found the field with exact match. */
2973 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2974 && !offset)
2975 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
2977 /* We are at the end of walk, see if we can view convert the
2978 result. */
2979 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2980 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2981 && operand_equal_p (TYPE_SIZE (type),
2982 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2984 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
2985 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2986 if (ret)
2987 STRIP_NOPS (ret);
2988 return ret;
2990 if (TREE_CODE (ctor) == STRING_CST)
2991 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2992 if (TREE_CODE (ctor) == CONSTRUCTOR)
2995 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2996 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
2997 return fold_array_ctor_reference (type, ctor, offset, size,
2998 from_decl);
2999 else
3000 return fold_nonarray_ctor_reference (type, ctor, offset, size,
3001 from_decl);
3004 return NULL_TREE;
3007 /* Return the tree representing the element referenced by T if T is an
3008 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
3009 names using VALUEIZE. Return NULL_TREE otherwise. */
3011 tree
3012 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
3014 tree ctor, idx, base;
3015 HOST_WIDE_INT offset, size, max_size;
3016 tree tem;
3018 if (TREE_THIS_VOLATILE (t))
3019 return NULL_TREE;
3021 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3022 return get_symbol_constant_value (t);
3024 tem = fold_read_from_constant_string (t);
3025 if (tem)
3026 return tem;
3028 switch (TREE_CODE (t))
3030 case ARRAY_REF:
3031 case ARRAY_RANGE_REF:
3032 /* Constant indexes are handled well by get_base_constructor.
3033 Only special case variable offsets.
3034 FIXME: This code can't handle nested references with variable indexes
3035 (they will be handled only by iteration of ccp). Perhaps we can bring
3036 get_ref_base_and_extent here and make it use a valueize callback. */
3037 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3038 && valueize
3039 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3040 && TREE_CODE (idx) == INTEGER_CST)
3042 tree low_bound, unit_size;
3043 double_int doffset;
3045 /* If the resulting bit-offset is constant, track it. */
3046 if ((low_bound = array_ref_low_bound (t),
3047 TREE_CODE (low_bound) == INTEGER_CST)
3048 && (unit_size = array_ref_element_size (t),
3049 host_integerp (unit_size, 1))
3050 && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
3051 .sext (TYPE_PRECISION (TREE_TYPE (idx))),
3052 doffset.fits_shwi ()))
3054 offset = doffset.to_shwi ();
3055 offset *= TREE_INT_CST_LOW (unit_size);
3056 offset *= BITS_PER_UNIT;
3058 base = TREE_OPERAND (t, 0);
3059 ctor = get_base_constructor (base, &offset, valueize);
3060 /* Empty constructor. Always fold to 0. */
3061 if (ctor == error_mark_node)
3062 return build_zero_cst (TREE_TYPE (t));
3063 /* Out of bound array access. Value is undefined,
3064 but don't fold. */
3065 if (offset < 0)
3066 return NULL_TREE;
3067 /* We can not determine ctor. */
3068 if (!ctor)
3069 return NULL_TREE;
3070 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3071 TREE_INT_CST_LOW (unit_size)
3072 * BITS_PER_UNIT,
3073 base);
3076 /* Fallthru. */
3078 case COMPONENT_REF:
3079 case BIT_FIELD_REF:
3080 case TARGET_MEM_REF:
3081 case MEM_REF:
3082 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3083 ctor = get_base_constructor (base, &offset, valueize);
3085 /* Empty constructor. Always fold to 0. */
3086 if (ctor == error_mark_node)
3087 return build_zero_cst (TREE_TYPE (t));
3088 /* We do not know precise address. */
3089 if (max_size == -1 || max_size != size)
3090 return NULL_TREE;
3091 /* We can not determine ctor. */
3092 if (!ctor)
3093 return NULL_TREE;
3095 /* Out of bound array access. Value is undefined, but don't fold. */
3096 if (offset < 0)
3097 return NULL_TREE;
3099 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
3100 base);
3102 case REALPART_EXPR:
3103 case IMAGPART_EXPR:
3105 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3106 if (c && TREE_CODE (c) == COMPLEX_CST)
3107 return fold_build1_loc (EXPR_LOCATION (t),
3108 TREE_CODE (t), TREE_TYPE (t), c);
3109 break;
3112 default:
3113 break;
3116 return NULL_TREE;
3119 tree
3120 fold_const_aggregate_ref (tree t)
3122 return fold_const_aggregate_ref_1 (t, NULL);
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  Returns NULL_TREE when the target cannot be
   determined or cannot be referenced from the current unit.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
{
  unsigned HOST_WIDE_INT offset, size;
  tree v, fn, vtable, init;

  vtable = v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  /* BINFO_VTABLE is either &vtable or &vtable + constant byte offset;
     extract the offset (converted to bits) and strip down to the ADDR_EXPR
     of the vtable VAR_DECL itself.  */
  if (TREE_CODE (v) == POINTER_PLUS_EXPR)
    {
      offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
      v = TREE_OPERAND (v, 0);
    }
  else
    offset = 0;

  if (TREE_CODE (v) != ADDR_EXPR)
    return NULL_TREE;
  v = TREE_OPERAND (v, 0);

  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    return NULL_TREE;
  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* The constructor was not streamed in; only expected in LTO.  */
      gcc_assert (in_lto_p);
      return NULL_TREE;
    }

  /* The vtable is an array of pointers; index entry TOKEN by computing
     its bit offset from the element size.  */
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
  offset += token * size;
  fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			    offset, size, v);
  if (!fn || integer_zerop (fn))
    return NULL_TREE;
  gcc_assert (TREE_CODE (fn) == ADDR_EXPR
	      || TREE_CODE (fn) == FDESC_EXPR);
  fn = TREE_OPERAND (fn, 0);
  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);

  /* When cgraph node is missing and function is not public, we cannot
     devirtualize.  This can happen in WHOPR when the actual method
     ends up in other partition, because we found devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (fn, vtable))
    return NULL_TREE;

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_get_create_node (fn);

  return fn;
}
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  A "false" answer
   means "unknown", not "known negative".  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
	 cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
	{
	case ABS_EXPR:
	  /* Always true for floating-point operands.  */
	  return true;

	case MULT_EXPR:
	  /* True if the two operands are identical (since we are
	     restricted to floating-point inputs).  */
	  op0 = gimple_assign_rhs1 (def_stmt);
	  op1 = gimple_assign_rhs2 (def_stmt);

	  if (op0 == op1
	      || operand_equal_p (op0, op1, 0))
	    return true;

	  /* Fall through: x * y with distinct operands proves nothing.  */
	default:
	  return false;
	}
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  tree arg1;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    /* Built-ins whose result is non-negative for any input
	       (mathematically, or by returning an absolute value).  */
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0; apart from that, sqrt is not defined
		 over negative inputs, so the result is non-negative
		 whenever signed zeros need not be honored.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the second argument is an even integer.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == INTEGER_CST
		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the second argument is an even integer-valued
		 real.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (arg1);
		  n = real_to_integer (&c);

		  if ((n & 1) == 0)
		    {
		      /* Round-trip N back to a real and make sure the
			 constant really was an exact integer (not merely
			 truncated to one by real_to_integer).  */
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}

	      break;

	    default:
	      return false;
	    }
	}
    }

  return false;
}