* configure: Regenerated.
[official-gcc.git] / gcc / gimple-fold.c
blob4dba726f274af966345af20833733148502a6261
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010, 2011, 2012 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "dumpfile.h"
29 #include "tree-flow.h"
30 #include "tree-ssa-propagate.h"
31 #include "target.h"
32 #include "gimple-fold.h"
33 #include "langhooks.h"
35 /* Return true when DECL can be referenced from current unit.
36 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
37 We can get declarations that are not possible to reference for various
38 reasons:
40 1) When analyzing C++ virtual tables.
41 C++ virtual tables do have known constructors even
42 when they are keyed to other compilation unit.
43 Those tables can contain pointers to methods and vars
44 in other units. Those methods have both STATIC and EXTERNAL
45 set.
46 2) In WHOPR mode devirtualization might lead to reference
47 to method that was partitioned elsewhere.
48 In this case we have static VAR_DECL or FUNCTION_DECL
49 that has no corresponding callgraph/varpool node
50 declaring the body.
51 3) COMDAT functions referred by external vtables that
52 we devirtualize only during final compilation stage.
53 At this time we already decided that we will not output
54 the function body and thus we can't reference the symbol
55 directly. */
/* NOTE(review): this text looks like a web-viewer scrape of gimple-fold.c;
   brace-only and blank lines appear to have been dropped by the extraction,
   so block structure must be read from the embedded original line numbers
   (e.g. original lines 59, 111, 119 etc. are missing) — confirm against the
   pristine source before compiling.  */
57 static bool
58 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
60 struct varpool_node *vnode;
61 struct cgraph_node *node;
62 symtab_node snode;
64 /* We will later output the initializer, so we can refer to it.
65 So we are concerned only when DECL comes from initializer of
66 external var. */
67 if (!from_decl
68 || TREE_CODE (from_decl) != VAR_DECL
69 || !DECL_EXTERNAL (from_decl)
70 || (flag_ltrans
71 && symtab_get_node (from_decl)->symbol.in_other_partition))
72 return true;
73 /* We are concerned only about static/external vars and functions. */
74 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
75 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
76 return true;
77 /* Weakrefs have somewhat confusing DECL_EXTERNAL flag set; they
78 are always safe. */
79 if (DECL_EXTERNAL (decl)
80 && lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
81 return true;
82 /* We are folding reference from external vtable. The vtable may refer
83 to a symbol keyed to other compilation unit. The other compilation
84 unit may be in separate DSO and the symbol may be hidden. */
85 if (DECL_VISIBILITY_SPECIFIED (decl)
86 && DECL_EXTERNAL (decl)
87 && (!(snode = symtab_get_node (decl)) || !snode->symbol.in_other_partition))
88 return false;
89 /* When function is public, we always can introduce new reference.
90 Exception are the COMDAT functions where introducing a direct
91 reference imply need to include function body in the current unit. */
92 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
93 return true;
94 /* We are not at ltrans stage; so don't worry about WHOPR.
95 Also when still gimplifying all referred comdat functions will be
96 produced.
98 As observed in PR20991 for already optimized out comdat virtual functions
99 it may be tempting to not necessarily give up because the copy will be
100 output elsewhere when corresponding vtable is output.
101 This is however not possible - ABI specify that COMDATs are output in
102 units where they are used and when the other unit was compiled with LTO
103 it is possible that vtable was kept public while the function itself
104 was privatized. */
105 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
106 return true;
108 /* OK we are seeing either COMDAT or static variable. In this case we must
109 check that the definition is still around so we can refer it. */
110 if (TREE_CODE (decl) == FUNCTION_DECL)
112 node = cgraph_get_node (decl);
113 /* Check that we still have function body and that we didn't take
114 the decision to eliminate offline copy of the function yet.
115 The second is important when devirtualization happens during final
116 compilation stage when making a new reference no longer makes callee
117 to be compiled. */
118 if (!node || !node->analyzed || node->global.inlined_to)
120 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
121 return false;
124 else if (TREE_CODE (decl) == VAR_DECL
126 vnode = varpool_get_node (decl);
/* A variable with no analyzed varpool node has no definition we can refer to.  */
127 if (!vnode || !vnode->analyzed)
129 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
130 return false;
133 return true;
136 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
137 acceptable form for is_gimple_min_invariant.
138 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Returns the canonicalized value, or NULL_TREE when the value cannot be
   safely referenced from the current unit.  */
140 tree
141 canonicalize_constructor_val (tree cval, tree from_decl)
143 STRIP_USELESS_TYPE_CONVERSION (cval);
/* Rewrite "ptr p+ cst" into "&MEM_REF[ptr, cst]" so the result is an
   ADDR_EXPR form that is_gimple_min_invariant accepts.  */
144 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
145 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
147 tree ptr = TREE_OPERAND (cval, 0);
148 if (is_gimple_min_invariant (ptr))
149 cval = build1_loc (EXPR_LOCATION (cval),
150 ADDR_EXPR, TREE_TYPE (ptr),
151 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
152 ptr,
153 fold_convert (ptr_type_node,
154 TREE_OPERAND (cval, 1))));
/* For an address constant, resolve the base object (replacing a compound
   literal by its backing decl) and check it may be referenced from here.  */
156 if (TREE_CODE (cval) == ADDR_EXPR
158 tree base = NULL_TREE;
159 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
161 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
162 if (base)
163 TREE_OPERAND (cval, 0) = base;
165 else
166 base = get_base_address (TREE_OPERAND (cval, 0));
167 if (!base)
168 return NULL_TREE;
170 if ((TREE_CODE (base) == VAR_DECL
171 || TREE_CODE (base) == FUNCTION_DECL)
172 && !can_refer_decl_in_current_unit_p (base, from_decl)
173 return NULL_TREE;
174 if (TREE_CODE (base) == VAR_DECL)
175 TREE_ADDRESSABLE (base) = 1;
176 else if (TREE_CODE (base) == FUNCTION_DECL)
178 /* Make sure we create a cgraph node for functions we'll reference.
179 They can be non-existent if the reference comes from an entry
180 of an external vtable for example. */
181 cgraph_get_create_node (base);
183 /* Fixup types in global initializers. */
184 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
185 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
187 return cval;
190 /* If SYM is a constant variable with known value, return the value.
191 NULL_TREE is returned otherwise.
   The returned value is canonicalized so it is a valid GIMPLE minimal
   invariant; callers may substitute it directly.  */
193 tree
194 get_symbol_constant_value (tree sym)
196 if (const_value_known_p (sym))
198 tree val = DECL_INITIAL (sym);
199 if (val)
201 val = canonicalize_constructor_val (val, sym);
/* Only hand back values that are substitutable in GIMPLE.  */
202 if (val && is_gimple_min_invariant (val))
203 return val;
204 else
205 return NULL_TREE;
207 /* Variables declared 'const' without an initializer
208 have zero as the initializer if they may not be
209 overridden at link or run time. */
210 if (!val
211 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
212 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
213 return build_zero_cst (TREE_TYPE (sym));
216 return NULL_TREE;
221 /* Subroutine of fold_stmt. We perform several simplifications of the
222 memory reference tree EXPR and make sure to re-gimplify them properly
223 after propagation of constant addresses. IS_LHS is true if the
224 reference is supposed to be an lvalue.
   Returns the simplified reference or NULL_TREE when nothing changed.
   Note the function recurses after each structural rewrite so that the
   simplified form is itself re-simplified.  */
226 static tree
227 maybe_fold_reference (tree expr, bool is_lhs)
229 tree *t = &expr;
230 tree result;
/* Fold component extractions (VIEW_CONVERT/REALPART/IMAGPART/BIT_FIELD_REF)
   of constants directly via the generic folders.  */
232 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
233 || TREE_CODE (expr) == REALPART_EXPR
234 || TREE_CODE (expr) == IMAGPART_EXPR)
235 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
236 return fold_unary_loc (EXPR_LOCATION (expr),
237 TREE_CODE (expr),
238 TREE_TYPE (expr),
239 TREE_OPERAND (expr, 0));
240 else if (TREE_CODE (expr) == BIT_FIELD_REF
241 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
242 return fold_ternary_loc (EXPR_LOCATION (expr),
243 TREE_CODE (expr),
244 TREE_TYPE (expr),
245 TREE_OPERAND (expr, 0),
246 TREE_OPERAND (expr, 1),
247 TREE_OPERAND (expr, 2));
/* Strip handled components to reach the base reference T points at.  */
249 while (handled_component_p (*t))
250 t = &TREE_OPERAND (*t, 0);
252 /* Canonicalize MEM_REFs invariant address operand. Do this first
253 to avoid feeding non-canonical MEM_REFs elsewhere. */
254 if (TREE_CODE (*t) == MEM_REF
255 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
257 bool volatile_p = TREE_THIS_VOLATILE (*t);
258 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
259 TREE_OPERAND (*t, 0),
260 TREE_OPERAND (*t, 1));
261 if (tem)
/* fold_binary drops the volatile flag; restore it on the new ref.  */
263 TREE_THIS_VOLATILE (tem) = volatile_p;
264 *t = tem;
265 tem = maybe_fold_reference (expr, is_lhs);
266 if (tem)
267 return tem;
268 return expr;
/* When reading, try to fold the whole reference to a constant.  */
272 if (!is_lhs
273 && (result = fold_const_aggregate_ref (expr))
274 && is_gimple_min_invariant (result))
275 return result;
277 /* Fold back MEM_REFs to reference trees. */
278 if (TREE_CODE (*t) == MEM_REF
279 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
280 && integer_zerop (TREE_OPERAND (*t, 1))
281 && (TREE_THIS_VOLATILE (*t)
282 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
283 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
284 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
285 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
286 /* We have to look out here to not drop a required conversion
287 from the rhs to the lhs if is_lhs, but we don't have the
288 rhs here to verify that. Thus require strict type
289 compatibility. */
290 && types_compatible_p (TREE_TYPE (*t),
291 TREE_TYPE (TREE_OPERAND
292 (TREE_OPERAND (*t, 0), 0))))
294 tree tem;
295 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
296 tem = maybe_fold_reference (expr, is_lhs);
297 if (tem)
298 return tem;
299 return expr;
301 else if (TREE_CODE (*t) == TARGET_MEM_REF
303 tree tem = maybe_fold_tmr (*t);
304 if (tem)
306 *t = tem;
307 tem = maybe_fold_reference (expr, is_lhs);
308 if (tem)
309 return tem;
310 return expr;
314 return NULL_TREE;
318 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
319 replacement rhs for the statement or NULL_TREE if no simplification
320 could be made. It is assumed that the operands have been previously
321 folded.
   Dispatches on the RHS class (single / unary / binary / ternary) and in
   each case only returns RESULT when valid_gimple_rhs_p accepts it.  */
323 static tree
324 fold_gimple_assign (gimple_stmt_iterator *si)
326 gimple stmt = gsi_stmt (*si);
327 enum tree_code subcode = gimple_assign_rhs_code (stmt);
328 location_t loc = gimple_location (stmt);
330 tree result = NULL_TREE;
332 switch (get_gimple_rhs_class (subcode))
334 case GIMPLE_SINGLE_RHS:
336 tree rhs = gimple_assign_rhs1 (stmt);
338 if (REFERENCE_CLASS_P (rhs))
339 return maybe_fold_reference (rhs, false);
341 else if (TREE_CODE (rhs) == ADDR_EXPR
343 tree ref = TREE_OPERAND (rhs, 0);
344 tree tem = maybe_fold_reference (ref, true);
/* &MEM[p, 0] simplifies to p itself (with a possible conversion).  */
345 if (tem
346 && TREE_CODE (tem) == MEM_REF
347 && integer_zerop (TREE_OPERAND (tem, 1)))
348 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
349 else if (tem)
350 result = fold_convert (TREE_TYPE (rhs),
351 build_fold_addr_expr_loc (loc, tem));
352 else if (TREE_CODE (ref) == MEM_REF
353 && integer_zerop (TREE_OPERAND (ref, 1)))
354 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
357 else if (TREE_CODE (rhs) == CONSTRUCTOR
358 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
359 && (CONSTRUCTOR_NELTS (rhs)
360 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
362 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
363 unsigned i;
364 tree val;
366 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
367 if (TREE_CODE (val) != INTEGER_CST
368 && TREE_CODE (val) != REAL_CST
369 && TREE_CODE (val) != FIXED_CST)
370 return NULL_TREE;
372 return build_vector_from_ctor (TREE_TYPE (rhs),
373 CONSTRUCTOR_ELTS (rhs));
376 else if (DECL_P (rhs))
377 return unshare_expr (get_symbol_constant_value (rhs));
379 /* If we couldn't fold the RHS, hand over to the generic
380 fold routines. */
381 if (result == NULL_TREE)
382 result = fold (rhs);
384 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
385 that may have been added by fold, and "useless" type
386 conversions that might now be apparent due to propagation. */
387 STRIP_USELESS_TYPE_CONVERSION (result);
389 if (result != rhs && valid_gimple_rhs_p (result))
390 return result;
392 return NULL_TREE;
394 break;
396 case GIMPLE_UNARY_RHS:
398 tree rhs = gimple_assign_rhs1 (stmt);
400 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
401 if (result)
403 /* If the operation was a conversion do _not_ mark a
404 resulting constant with TREE_OVERFLOW if the original
405 constant was not. These conversions have implementation
406 defined behavior and retaining the TREE_OVERFLOW flag
407 here would confuse later passes such as VRP. */
408 if (CONVERT_EXPR_CODE_P (subcode)
409 && TREE_CODE (result) == INTEGER_CST
410 && TREE_CODE (rhs) == INTEGER_CST)
411 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
413 STRIP_USELESS_TYPE_CONVERSION (result);
414 if (valid_gimple_rhs_p (result))
415 return result;
418 break;
420 case GIMPLE_BINARY_RHS:
421 /* Try to canonicalize for boolean-typed X the comparisons
422 X == 0, X == 1, X != 0, and X != 1. */
423 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
424 || gimple_assign_rhs_code (stmt) == NE_EXPR)
426 tree lhs = gimple_assign_lhs (stmt);
427 tree op1 = gimple_assign_rhs1 (stmt);
428 tree op2 = gimple_assign_rhs2 (stmt);
429 tree type = TREE_TYPE (op1);
431 /* Check whether the comparison operands are of the same boolean
432 type as the result type is.
433 Check that second operand is an integer-constant with value
434 one or zero. */
435 if (TREE_CODE (op2) == INTEGER_CST
436 && (integer_zerop (op2) || integer_onep (op2))
437 && useless_type_conversion_p (TREE_TYPE (lhs), type))
439 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
440 bool is_logical_not = false;
442 /* X == 0 and X != 1 is a logical-not of X
443 X == 1 and X != 0 is X */
444 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
445 || (cmp_code == NE_EXPR && integer_onep (op2)))
446 is_logical_not = true;
448 if (is_logical_not == false)
449 result = op1;
450 /* Only for one-bit precision typed X the transformation
451 !X -> ~X is valid. */
452 else if (TYPE_PRECISION (type) == 1)
453 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
454 type, op1);
455 /* Otherwise we use !X -> X ^ 1. */
456 else
457 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
458 type, op1, build_int_cst (type, 1));
463 if (!result)
464 result = fold_binary_loc (loc, subcode,
465 TREE_TYPE (gimple_assign_lhs (stmt)),
466 gimple_assign_rhs1 (stmt),
467 gimple_assign_rhs2 (stmt));
469 if (result)
471 STRIP_USELESS_TYPE_CONVERSION (result);
472 if (valid_gimple_rhs_p (result))
473 return result;
475 break;
477 case GIMPLE_TERNARY_RHS:
478 /* Try to fold a conditional expression. */
479 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
481 tree op0 = gimple_assign_rhs1 (stmt);
482 tree tem;
483 bool set = false;
484 location_t cond_loc = gimple_location (stmt);
486 if (COMPARISON_CLASS_P (op0))
488 fold_defer_overflow_warnings ();
489 tem = fold_binary_loc (cond_loc,
490 TREE_CODE (op0), TREE_TYPE (op0),
491 TREE_OPERAND (op0, 0),
492 TREE_OPERAND (op0, 1));
493 /* This is actually a conditional expression, not a GIMPLE
494 conditional statement, however, the valid_gimple_rhs_p
495 test still applies. */
496 set = (tem && is_gimple_condexpr (tem)
497 && valid_gimple_rhs_p (tem));
498 fold_undefer_overflow_warnings (set, stmt, 0);
500 else if (is_gimple_min_invariant (op0))
502 tem = op0;
503 set = true;
505 else
506 return NULL_TREE;
508 if (set)
509 result = fold_build3_loc (cond_loc, COND_EXPR,
510 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
511 gimple_assign_rhs2 (stmt),
512 gimple_assign_rhs3 (stmt));
515 if (!result)
516 result = fold_ternary_loc (loc, subcode,
517 TREE_TYPE (gimple_assign_lhs (stmt)),
518 gimple_assign_rhs1 (stmt),
519 gimple_assign_rhs2 (stmt),
520 gimple_assign_rhs3 (stmt));
522 if (result)
524 STRIP_USELESS_TYPE_CONVERSION (result);
525 if (valid_gimple_rhs_p (result))
526 return result;
528 break;
530 case GIMPLE_INVALID_RHS:
531 gcc_unreachable ();
534 return NULL_TREE;
537 /* Attempt to fold a conditional statement. Return true if any changes were
538 made. We only attempt to fold the condition expression, and do not perform
539 any transformation that would require alteration of the cfg. It is
540 assumed that the operands have been previously folded. */
542 static bool
543 fold_gimple_cond (gimple stmt)
545 tree result = fold_binary_loc (gimple_location (stmt),
546 gimple_cond_code (stmt),
547 boolean_type_node,
548 gimple_cond_lhs (stmt),
549 gimple_cond_rhs (stmt));
551 if (result)
553 STRIP_USELESS_TYPE_CONVERSION (result);
/* Only install the folded form when it is still a valid GIMPLE condition.  */
554 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
556 gimple_cond_set_condition_from_tree (stmt, result);
557 return true;
561 return false;
564 /* Convert EXPR into a GIMPLE value suitable for substitution on the
565 RHS of an assignment. Insert the necessary statements before
566 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
567 is replaced. If the call is expected to produce a result, then it
568 is replaced by an assignment of the new RHS to the result variable.
569 If the result is to be ignored, then the call is replaced by a
570 GIMPLE_NOP. A proper VDEF chain is retained by making the first
571 VUSE and the last VDEF of the whole sequence be the same as the replaced
572 statement and using new SSA names for stores in between. */
574 void
575 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
577 tree lhs;
578 gimple stmt, new_stmt;
579 gimple_stmt_iterator i;
580 gimple_seq stmts = NULL;
581 struct gimplify_ctx gctx;
582 gimple laststore;
583 tree reaching_vuse;
585 stmt = gsi_stmt (*si_p);
587 gcc_assert (is_gimple_call (stmt));
589 push_gimplify_context (&gctx);
590 gctx.into_ssa = gimple_in_ssa_p (cfun);
592 lhs = gimple_call_lhs (stmt);
593 if (lhs == NULL_TREE)
595 gimplify_and_add (expr, &stmts);
596 /* We can end up with folding a memcpy of an empty class assignment
597 which gets optimized away by C++ gimplification. */
598 if (gimple_seq_empty_p (stmts))
600 pop_gimplify_context (NULL);
601 if (gimple_in_ssa_p (cfun))
603 unlink_stmt_vdef (stmt);
604 release_defs (stmt);
606 gsi_remove (si_p, true);
607 return;
610 else
612 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
613 new_stmt = gimple_build_assign (lhs, tmp);
614 i = gsi_last (stmts);
615 gsi_insert_after_without_update (&i, new_stmt,
616 GSI_CONTINUE_LINKING);
619 pop_gimplify_context (NULL);
621 if (gimple_has_location (stmt))
622 annotate_all_with_location (stmts, gimple_location (stmt));
624 /* First iterate over the replacement statements backward, assigning
625 virtual operands to their defining statements. */
626 laststore = NULL;
627 for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
629 new_stmt = gsi_stmt (i);
/* A statement needs a VDEF when it stores to memory or is a call with
   visible side effects on memory (not NOVOPS/PURE/CONST/NORETURN).  */
630 if ((gimple_assign_single_p (new_stmt)
631 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
632 || (is_gimple_call (new_stmt)
633 && (gimple_call_flags (new_stmt)
634 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
636 tree vdef;
637 if (!laststore)
638 vdef = gimple_vdef (stmt);
639 else
640 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
641 gimple_set_vdef (new_stmt, vdef);
642 if (vdef && TREE_CODE (vdef) == SSA_NAME)
643 SSA_NAME_DEF_STMT (vdef) = new_stmt;
644 laststore = new_stmt;
648 /* Second iterate over the statements forward, assigning virtual
649 operands to their uses. */
650 reaching_vuse = gimple_vuse (stmt);
651 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
653 new_stmt = gsi_stmt (i);
654 /* If the new statement possibly has a VUSE, update it with exact SSA
655 name we know will reach this one. */
656 if (gimple_has_mem_ops (new_stmt))
657 gimple_set_vuse (new_stmt, reaching_vuse);
658 gimple_set_modified (new_stmt, true);
659 if (gimple_vdef (new_stmt))
660 reaching_vuse = gimple_vdef (new_stmt);
663 /* If the new sequence does not do a store release the virtual
664 definition of the original statement. */
665 if (reaching_vuse
666 && reaching_vuse == gimple_vuse (stmt))
668 tree vdef = gimple_vdef (stmt);
669 if (vdef
670 && TREE_CODE (vdef) == SSA_NAME)
672 unlink_stmt_vdef (stmt);
673 release_ssa_name (vdef);
677 /* Finally replace the original statement with the sequence. */
678 gsi_replace_with_seq (si_p, stmts, false);
681 /* Return the string length, maximum string length or maximum value of
682 ARG in LENGTH.
683 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
684 is not NULL and, for TYPE == 0, its value is not equal to the length
685 we determine or if we are unable to determine the length or value,
686 return false. VISITED is a bitmap of visited variables.
687 TYPE is 0 if string length should be returned, 1 for maximum string
688 length and 2 for maximum value ARG can have. */
690 static bool
691 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
693 tree var, val;
694 gimple def_stmt;
696 if (TREE_CODE (arg) != SSA_NAME
698 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
699 if (TREE_CODE (arg) == ADDR_EXPR
700 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
701 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
703 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
704 if (TREE_CODE (aop0) == INDIRECT_REF
705 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
706 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
707 length, visited, type);
/* TYPE == 2: ARG itself is the value; it must be a non-negative constant.  */
710 if (type == 2)
712 val = arg;
713 if (TREE_CODE (val) != INTEGER_CST
714 || tree_int_cst_sgn (val) < 0)
715 return false;
717 else
718 val = c_strlen (arg, 1);
719 if (!val)
720 return false;
722 if (*length)
/* For maximum queries keep the larger constant; for exact length (TYPE 0)
   the previously recorded value must match.  */
724 if (type > 0)
726 if (TREE_CODE (*length) != INTEGER_CST
727 || TREE_CODE (val) != INTEGER_CST)
728 return false;
730 if (tree_int_cst_lt (*length, val))
731 *length = val;
732 return true;
734 else if (simple_cst_equal (val, *length) != 1)
735 return false;
738 *length = val;
739 return true;
742 /* If ARG is registered for SSA update we cannot look at its defining
743 statement. */
744 if (name_registered_for_update_p (arg))
745 return false;
747 /* If we were already here, break the infinite cycle. */
748 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
749 return true;
751 var = arg;
752 def_stmt = SSA_NAME_DEF_STMT (var);
754 switch (gimple_code (def_stmt))
756 case GIMPLE_ASSIGN:
757 /* The RHS of the statement defining VAR must either have a
758 constant length or come from another SSA_NAME with a constant
759 length. */
760 if (gimple_assign_single_p (def_stmt)
761 || gimple_assign_unary_nop_p (def_stmt))
763 tree rhs = gimple_assign_rhs1 (def_stmt);
764 return get_maxval_strlen (rhs, length, visited, type);
766 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
768 tree op2 = gimple_assign_rhs2 (def_stmt);
769 tree op3 = gimple_assign_rhs3 (def_stmt);
770 return get_maxval_strlen (op2, length, visited, type)
771 && get_maxval_strlen (op3, length, visited, type);
773 return false;
775 case GIMPLE_PHI:
777 /* All the arguments of the PHI node must have the same constant
778 length. */
779 unsigned i;
781 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
783 tree arg = gimple_phi_arg (def_stmt, i)->def;
785 /* If this PHI has itself as an argument, we cannot
786 determine the string length of this argument. However,
787 if we can find a constant string length for the other
788 PHI args then we can still be sure that this is a
789 constant string length. So be optimistic and just
790 continue with the next argument. */
791 if (arg == gimple_phi_result (def_stmt))
792 continue;
794 if (!get_maxval_strlen (arg, length, visited, type))
795 return false;
798 return true;
800 default:
801 return false;
806 /* Fold builtin call in statement STMT. Returns a simplified tree.
807 We may return a non-constant expression, including another call
808 to a different function and with different arguments, e.g.,
809 substituting memcpy for strcpy when the string length is known.
810 Note that some builtins expand into inline code that may not
811 be valid in GIMPLE. Callers must take care. */
813 tree
814 gimple_fold_builtin (gimple stmt)
816 tree result, val[3];
817 tree callee, a;
818 int arg_idx, type;
819 bitmap visited;
820 bool ignore;
821 int nargs;
822 location_t loc = gimple_location (stmt);
824 gcc_assert (is_gimple_call (stmt));
826 ignore = (gimple_call_lhs (stmt) == NULL);
828 /* First try the generic builtin folder. If that succeeds, return the
829 result directly. */
830 result = fold_call_stmt (stmt, ignore);
831 if (result)
833 if (ignore)
834 STRIP_NOPS (result);
835 return result;
838 /* Ignore MD builtins. */
839 callee = gimple_call_fndecl (stmt);
840 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
841 return NULL_TREE;
843 /* Give up for always_inline inline builtins until they are
844 inlined. */
845 if (avoid_folding_inline_builtin (callee))
846 return NULL_TREE;
848 /* If the builtin could not be folded, and it has no argument list,
849 we're done. */
850 nargs = gimple_call_num_args (stmt);
851 if (nargs == 0)
852 return NULL_TREE;
854 /* Limit the work only for builtins we know how to simplify.
   ARG_IDX selects the argument fed to get_maxval_strlen below; TYPE is
   its query mode (0 = exact strlen, 1 = max strlen, 2 = max value).  */
855 switch (DECL_FUNCTION_CODE (callee))
857 case BUILT_IN_STRLEN:
858 case BUILT_IN_FPUTS:
859 case BUILT_IN_FPUTS_UNLOCKED:
860 arg_idx = 0;
861 type = 0;
862 break;
863 case BUILT_IN_STRCPY:
864 case BUILT_IN_STRNCPY:
865 arg_idx = 1;
866 type = 0;
867 break;
868 case BUILT_IN_MEMCPY_CHK:
869 case BUILT_IN_MEMPCPY_CHK:
870 case BUILT_IN_MEMMOVE_CHK:
871 case BUILT_IN_MEMSET_CHK:
872 case BUILT_IN_STRNCPY_CHK:
873 case BUILT_IN_STPNCPY_CHK:
874 arg_idx = 2;
875 type = 2;
876 break;
877 case BUILT_IN_STRCPY_CHK:
878 case BUILT_IN_STPCPY_CHK:
879 arg_idx = 1;
880 type = 1;
881 break;
882 case BUILT_IN_SNPRINTF_CHK:
883 case BUILT_IN_VSNPRINTF_CHK:
884 arg_idx = 1;
885 type = 2;
886 break;
887 default:
888 return NULL_TREE;
891 if (arg_idx >= nargs)
892 return NULL_TREE;
894 /* Try to use the dataflow information gathered by the CCP process. */
895 visited = BITMAP_ALLOC (NULL);
896 bitmap_clear (visited);
/* VAL is indexed by ARG_IDX, so at most one slot is filled per call.  */
898 memset (val, 0, sizeof (val));
899 a = gimple_call_arg (stmt, arg_idx);
900 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
901 val[arg_idx] = NULL_TREE;
903 BITMAP_FREE (visited);
905 result = NULL_TREE;
906 switch (DECL_FUNCTION_CODE (callee))
908 case BUILT_IN_STRLEN:
909 if (val[0] && nargs == 1)
911 tree new_val =
912 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
914 /* If the result is not a valid gimple value, or not a cast
915 of a valid gimple value, then we cannot use the result. */
916 if (is_gimple_val (new_val)
917 || (CONVERT_EXPR_P (new_val)
918 && is_gimple_val (TREE_OPERAND (new_val, 0))))
919 return new_val;
921 break;
923 case BUILT_IN_STRCPY:
924 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
925 result = fold_builtin_strcpy (loc, callee,
926 gimple_call_arg (stmt, 0),
927 gimple_call_arg (stmt, 1),
928 val[1]);
929 break;
931 case BUILT_IN_STRNCPY:
932 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
933 result = fold_builtin_strncpy (loc, callee,
934 gimple_call_arg (stmt, 0),
935 gimple_call_arg (stmt, 1),
936 gimple_call_arg (stmt, 2),
937 val[1]);
938 break;
940 case BUILT_IN_FPUTS:
941 if (nargs == 2)
942 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
943 gimple_call_arg (stmt, 1),
944 ignore, false, val[0]);
945 break;
947 case BUILT_IN_FPUTS_UNLOCKED:
948 if (nargs == 2)
949 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
950 gimple_call_arg (stmt, 1),
951 ignore, true, val[0]);
952 break;
954 case BUILT_IN_MEMCPY_CHK:
955 case BUILT_IN_MEMPCPY_CHK:
956 case BUILT_IN_MEMMOVE_CHK:
957 case BUILT_IN_MEMSET_CHK:
958 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
959 result = fold_builtin_memory_chk (loc, callee,
960 gimple_call_arg (stmt, 0),
961 gimple_call_arg (stmt, 1),
962 gimple_call_arg (stmt, 2),
963 gimple_call_arg (stmt, 3),
964 val[2], ignore,
965 DECL_FUNCTION_CODE (callee));
966 break;
968 case BUILT_IN_STRCPY_CHK:
969 case BUILT_IN_STPCPY_CHK:
970 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
971 result = fold_builtin_stxcpy_chk (loc, callee,
972 gimple_call_arg (stmt, 0),
973 gimple_call_arg (stmt, 1),
974 gimple_call_arg (stmt, 2),
975 val[1], ignore,
976 DECL_FUNCTION_CODE (callee));
977 break;
979 case BUILT_IN_STRNCPY_CHK:
980 case BUILT_IN_STPNCPY_CHK:
981 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
982 result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
983 gimple_call_arg (stmt, 1),
984 gimple_call_arg (stmt, 2),
985 gimple_call_arg (stmt, 3),
986 val[2], ignore,
987 DECL_FUNCTION_CODE (callee));
988 break;
990 case BUILT_IN_SNPRINTF_CHK:
991 case BUILT_IN_VSNPRINTF_CHK:
992 if (val[1] && is_gimple_val (val[1]))
993 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
994 DECL_FUNCTION_CODE (callee));
995 break;
997 default:
998 gcc_unreachable ();
1001 if (result && ignore)
1002 result = fold_ignored_result (result);
1003 return result;
1007 /* Return a binfo to be used for devirtualization of calls based on an object
1008 represented by a declaration (i.e. a global or automatically allocated one)
1009 or NULL if it cannot be found or is not safe. CST is expected to be an
1010 ADDR_EXPR of such object or the function will return NULL. Currently it is
1011 safe to use such binfo only if it has no base binfo (i.e. no ancestors). */
1013 tree
1014 gimple_extract_devirt_binfo_from_cst (tree cst)
1016 HOST_WIDE_INT offset, size, max_size;
1017 tree base, type, expected_type, binfo;
1018 bool last_artificial = false;
1020 if (!flag_devirtualize
1021 || TREE_CODE (cst) != ADDR_EXPR
1022 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
1023 return NULL_TREE;
1025 cst = TREE_OPERAND (cst, 0);
1026 expected_type = TREE_TYPE (cst);
1027 base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
1028 type = TREE_TYPE (base);
/* Require a declaration whose extent is exactly known (max_size == size).  */
1029 if (!DECL_P (base)
1030 || max_size == -1
1031 || max_size != size
1032 || TREE_CODE (type) != RECORD_TYPE)
1033 return NULL_TREE;
1035 /* Find the sub-object the constant actually refers to and mark whether it is
1036 an artificial one (as opposed to a user-defined one). */
1037 while (true)
1039 HOST_WIDE_INT pos, size;
1040 tree fld;
1042 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
1043 break;
1044 if (offset < 0)
1045 return NULL_TREE;
/* Descend into the field that covers OFFSET (in bits).  */
1047 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1049 if (TREE_CODE (fld) != FIELD_DECL)
1050 continue;
1052 pos = int_bit_position (fld);
1053 size = tree_low_cst (DECL_SIZE (fld), 1);
1054 if (pos <= offset && (pos + size) > offset)
1055 break;
1057 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
1058 return NULL_TREE;
1060 last_artificial = DECL_ARTIFICIAL (fld);
1061 type = TREE_TYPE (fld);
1062 offset -= pos;
1064 /* Artificial sub-objects are ancestors, we do not want to use them for
1065 devirtualization, at least not here. */
1066 if (last_artificial)
1067 return NULL_TREE;
1068 binfo = TYPE_BINFO (type);
1069 if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
1070 return NULL_TREE;
1071 else
1072 return binfo;
1075 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1076 The statement may be replaced by another statement, e.g., if the call
1077 simplifies to a constant value. Return true if any changes were made.
1078 It is assumed that the operands have been previously folded.
   When INPLACE is true only in-place edits (argument and callee updates)
   are done; statement replacement via builtin folding is skipped.  */
1080 static bool
1081 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1083 gimple stmt = gsi_stmt (*gsi);
1084 tree callee;
1085 bool changed = false;
1086 unsigned i;
1088 /* Fold *& in call arguments. */
1089 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1090 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1092 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1093 if (tmp)
1095 gimple_call_set_arg (stmt, i, tmp);
1096 changed = true;
1100 /* Check for virtual calls that became direct calls. */
1101 callee = gimple_call_fn (stmt);
1102 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1104 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1106 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1107 changed = true;
1109 else
/* Try to devirtualize through the static type of the object argument.  */
1111 tree obj = OBJ_TYPE_REF_OBJECT (callee);
1112 tree binfo = gimple_extract_devirt_binfo_from_cst (obj);
1113 if (binfo)
1115 HOST_WIDE_INT token
1116 = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1117 tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
1118 if (fndecl)
1120 gimple_call_set_fndecl (stmt, fndecl);
1121 changed = true;
1127 if (inplace)
1128 return changed;
1130 /* Check for builtins that CCP can handle using information not
1131 available in the generic fold routines. */
1132 callee = gimple_call_fndecl (stmt);
1133 if (callee && DECL_BUILT_IN (callee))
1135 tree result = gimple_fold_builtin (stmt);
1136 if (result)
/* Fall back to full gimplification when the folded tree is not a
   directly substitutable RHS.  */
1138 if (!update_call_from_tree (gsi, result))
1139 gimplify_and_update_call_from_tree (gsi, result);
1140 changed = true;
1144 return changed;
1147 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1148 distinguishes both cases. */
1150 static bool
1151 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1153 bool changed = false;
1154 gimple stmt = gsi_stmt (*gsi);
1155 unsigned i;
1156 gimple_stmt_iterator gsinext = *gsi;
1157 gimple next_stmt;
1159 gsi_next (&gsinext);
1160 next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);
1162 /* Fold the main computation performed by the statement. */
1163 switch (gimple_code (stmt))
1165 case GIMPLE_ASSIGN:
1167 unsigned old_num_ops = gimple_num_ops (stmt);
1168 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1169 tree lhs = gimple_assign_lhs (stmt);
1170 tree new_rhs;
1171 /* First canonicalize operand order. This avoids building new
1172 trees if this is the only thing fold would later do. */
1173 if ((commutative_tree_code (subcode)
1174 || commutative_ternary_tree_code (subcode))
1175 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1176 gimple_assign_rhs2 (stmt), false))
1178 tree tem = gimple_assign_rhs1 (stmt);
1179 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1180 gimple_assign_set_rhs2 (stmt, tem);
1181 changed = true;
1183 new_rhs = fold_gimple_assign (gsi);
1184 if (new_rhs
1185 && !useless_type_conversion_p (TREE_TYPE (lhs),
1186 TREE_TYPE (new_rhs)))
1187 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1188 if (new_rhs
1189 && (!inplace
1190 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1192 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1193 changed = true;
1195 break;
1198 case GIMPLE_COND:
1199 changed |= fold_gimple_cond (stmt);
1200 break;
1202 case GIMPLE_CALL:
1203 changed |= gimple_fold_call (gsi, inplace);
1204 break;
1206 case GIMPLE_ASM:
1207 /* Fold *& in asm operands. */
1209 size_t noutputs;
1210 const char **oconstraints;
1211 const char *constraint;
1212 bool allows_mem, allows_reg;
1214 noutputs = gimple_asm_noutputs (stmt);
1215 oconstraints = XALLOCAVEC (const char *, noutputs);
1217 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1219 tree link = gimple_asm_output_op (stmt, i);
1220 tree op = TREE_VALUE (link);
1221 oconstraints[i]
1222 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1223 if (REFERENCE_CLASS_P (op)
1224 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1226 TREE_VALUE (link) = op;
1227 changed = true;
1230 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1232 tree link = gimple_asm_input_op (stmt, i);
1233 tree op = TREE_VALUE (link);
1234 constraint
1235 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1236 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1237 oconstraints, &allows_mem, &allows_reg);
1238 if (REFERENCE_CLASS_P (op)
1239 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1240 != NULL_TREE)
1242 TREE_VALUE (link) = op;
1243 changed = true;
1247 break;
1249 case GIMPLE_DEBUG:
1250 if (gimple_debug_bind_p (stmt))
1252 tree val = gimple_debug_bind_get_value (stmt);
1253 if (val
1254 && REFERENCE_CLASS_P (val))
1256 tree tem = maybe_fold_reference (val, false);
1257 if (tem)
1259 gimple_debug_bind_set_value (stmt, tem);
1260 changed = true;
1263 else if (val
1264 && TREE_CODE (val) == ADDR_EXPR)
1266 tree ref = TREE_OPERAND (val, 0);
1267 tree tem = maybe_fold_reference (ref, false);
1268 if (tem)
1270 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
1271 gimple_debug_bind_set_value (stmt, tem);
1272 changed = true;
1276 break;
1278 default:;
1281 /* If stmt folds into nothing and it was the last stmt in a bb,
1282 don't call gsi_stmt. */
1283 if (gsi_end_p (*gsi))
1285 gcc_assert (next_stmt == NULL);
1286 return changed;
1289 stmt = gsi_stmt (*gsi);
1291 /* Fold *& on the lhs. Don't do this if stmt folded into nothing,
1292 as we'd changing the next stmt. */
1293 if (gimple_has_lhs (stmt) && stmt != next_stmt)
1295 tree lhs = gimple_get_lhs (stmt);
1296 if (lhs && REFERENCE_CLASS_P (lhs))
1298 tree new_lhs = maybe_fold_reference (lhs, true);
1299 if (new_lhs)
1301 gimple_set_lhs (stmt, new_lhs);
1302 changed = true;
1307 return changed;
1310 /* Fold the statement pointed to by GSI. In some cases, this function may
1311 replace the whole statement with a new one. Returns true iff folding
1312 makes any changes.
1313 The statement pointed to by GSI should be in valid gimple form but may
1314 be in unfolded state as resulting from for example constant propagation
1315 which can produce *&x = 0. */
1317 bool
1318 fold_stmt (gimple_stmt_iterator *gsi)
1320 return fold_stmt_1 (gsi, false);
1323 /* Perform the minimal folding on statement *GSI. Only operations like
1324 *&x created by constant propagation are handled. The statement cannot
1325 be replaced with a new one. Return true if the statement was
1326 changed, false otherwise.
1327 The statement *GSI should be in valid gimple form but may
1328 be in unfolded state as resulting from for example constant propagation
1329 which can produce *&x = 0. */
1331 bool
1332 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1334 gimple stmt = gsi_stmt (*gsi);
1335 bool changed = fold_stmt_1 (gsi, true);
1336 gcc_assert (gsi_stmt (*gsi) == stmt);
1337 return changed;
1340 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1341 if EXPR is null or we don't know how.
1342 If non-null, the result always has boolean type. */
1344 static tree
1345 canonicalize_bool (tree expr, bool invert)
1347 if (!expr)
1348 return NULL_TREE;
1349 else if (invert)
1351 if (integer_nonzerop (expr))
1352 return boolean_false_node;
1353 else if (integer_zerop (expr))
1354 return boolean_true_node;
1355 else if (TREE_CODE (expr) == SSA_NAME)
1356 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1357 build_int_cst (TREE_TYPE (expr), 0));
1358 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1359 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1360 boolean_type_node,
1361 TREE_OPERAND (expr, 0),
1362 TREE_OPERAND (expr, 1));
1363 else
1364 return NULL_TREE;
1366 else
1368 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1369 return expr;
1370 if (integer_nonzerop (expr))
1371 return boolean_true_node;
1372 else if (integer_zerop (expr))
1373 return boolean_false_node;
1374 else if (TREE_CODE (expr) == SSA_NAME)
1375 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1376 build_int_cst (TREE_TYPE (expr), 0));
1377 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1378 return fold_build2 (TREE_CODE (expr),
1379 boolean_type_node,
1380 TREE_OPERAND (expr, 0),
1381 TREE_OPERAND (expr, 1));
1382 else
1383 return NULL_TREE;
1387 /* Check to see if a boolean expression EXPR is logically equivalent to the
1388 comparison (OP1 CODE OP2). Check for various identities involving
1389 SSA_NAMEs. */
1391 static bool
1392 same_bool_comparison_p (const_tree expr, enum tree_code code,
1393 const_tree op1, const_tree op2)
1395 gimple s;
1397 /* The obvious case. */
1398 if (TREE_CODE (expr) == code
1399 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1400 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1401 return true;
1403 /* Check for comparing (name, name != 0) and the case where expr
1404 is an SSA_NAME with a definition matching the comparison. */
1405 if (TREE_CODE (expr) == SSA_NAME
1406 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1408 if (operand_equal_p (expr, op1, 0))
1409 return ((code == NE_EXPR && integer_zerop (op2))
1410 || (code == EQ_EXPR && integer_nonzerop (op2)));
1411 s = SSA_NAME_DEF_STMT (expr);
1412 if (is_gimple_assign (s)
1413 && gimple_assign_rhs_code (s) == code
1414 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1415 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1416 return true;
1419 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1420 of name is a comparison, recurse. */
1421 if (TREE_CODE (op1) == SSA_NAME
1422 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1424 s = SSA_NAME_DEF_STMT (op1);
1425 if (is_gimple_assign (s)
1426 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1428 enum tree_code c = gimple_assign_rhs_code (s);
1429 if ((c == NE_EXPR && integer_zerop (op2))
1430 || (c == EQ_EXPR && integer_nonzerop (op2)))
1431 return same_bool_comparison_p (expr, c,
1432 gimple_assign_rhs1 (s),
1433 gimple_assign_rhs2 (s));
1434 if ((c == EQ_EXPR && integer_zerop (op2))
1435 || (c == NE_EXPR && integer_nonzerop (op2)))
1436 return same_bool_comparison_p (expr,
1437 invert_tree_comparison (c, false),
1438 gimple_assign_rhs1 (s),
1439 gimple_assign_rhs2 (s));
1442 return false;
1445 /* Check to see if two boolean expressions OP1 and OP2 are logically
1446 equivalent. */
1448 static bool
1449 same_bool_result_p (const_tree op1, const_tree op2)
1451 /* Simple cases first. */
1452 if (operand_equal_p (op1, op2, 0))
1453 return true;
1455 /* Check the cases where at least one of the operands is a comparison.
1456 These are a bit smarter than operand_equal_p in that they apply some
1457 identifies on SSA_NAMEs. */
1458 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1459 && same_bool_comparison_p (op1, TREE_CODE (op2),
1460 TREE_OPERAND (op2, 0),
1461 TREE_OPERAND (op2, 1)))
1462 return true;
1463 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1464 && same_bool_comparison_p (op2, TREE_CODE (op1),
1465 TREE_OPERAND (op1, 0),
1466 TREE_OPERAND (op1, 1)))
1467 return true;
1469 /* Default case. */
1470 return false;
1473 /* Forward declarations for some mutually recursive functions. */
1475 static tree
1476 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1477 enum tree_code code2, tree op2a, tree op2b);
1478 static tree
1479 and_var_with_comparison (tree var, bool invert,
1480 enum tree_code code2, tree op2a, tree op2b);
1481 static tree
1482 and_var_with_comparison_1 (gimple stmt,
1483 enum tree_code code2, tree op2a, tree op2b);
1484 static tree
1485 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1486 enum tree_code code2, tree op2a, tree op2b);
1487 static tree
1488 or_var_with_comparison (tree var, bool invert,
1489 enum tree_code code2, tree op2a, tree op2b);
1490 static tree
1491 or_var_with_comparison_1 (gimple stmt,
1492 enum tree_code code2, tree op2a, tree op2b);
1494 /* Helper function for and_comparisons_1: try to simplify the AND of the
1495 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1496 If INVERT is true, invert the value of the VAR before doing the AND.
1497 Return NULL_EXPR if we can't simplify this to a single expression. */
1499 static tree
1500 and_var_with_comparison (tree var, bool invert,
1501 enum tree_code code2, tree op2a, tree op2b)
1503 tree t;
1504 gimple stmt = SSA_NAME_DEF_STMT (var);
1506 /* We can only deal with variables whose definitions are assignments. */
1507 if (!is_gimple_assign (stmt))
1508 return NULL_TREE;
1510 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1511 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1512 Then we only have to consider the simpler non-inverted cases. */
1513 if (invert)
1514 t = or_var_with_comparison_1 (stmt,
1515 invert_tree_comparison (code2, false),
1516 op2a, op2b);
1517 else
1518 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1519 return canonicalize_bool (t, invert);
1522 /* Try to simplify the AND of the ssa variable defined by the assignment
1523 STMT with the comparison specified by (OP2A CODE2 OP2B).
1524 Return NULL_EXPR if we can't simplify this to a single expression. */
1526 static tree
1527 and_var_with_comparison_1 (gimple stmt,
1528 enum tree_code code2, tree op2a, tree op2b)
1530 tree var = gimple_assign_lhs (stmt);
1531 tree true_test_var = NULL_TREE;
1532 tree false_test_var = NULL_TREE;
1533 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1535 /* Check for identities like (var AND (var == 0)) => false. */
1536 if (TREE_CODE (op2a) == SSA_NAME
1537 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1539 if ((code2 == NE_EXPR && integer_zerop (op2b))
1540 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1542 true_test_var = op2a;
1543 if (var == true_test_var)
1544 return var;
1546 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1547 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1549 false_test_var = op2a;
1550 if (var == false_test_var)
1551 return boolean_false_node;
1555 /* If the definition is a comparison, recurse on it. */
1556 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1558 tree t = and_comparisons_1 (innercode,
1559 gimple_assign_rhs1 (stmt),
1560 gimple_assign_rhs2 (stmt),
1561 code2,
1562 op2a,
1563 op2b);
1564 if (t)
1565 return t;
1568 /* If the definition is an AND or OR expression, we may be able to
1569 simplify by reassociating. */
1570 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1571 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1573 tree inner1 = gimple_assign_rhs1 (stmt);
1574 tree inner2 = gimple_assign_rhs2 (stmt);
1575 gimple s;
1576 tree t;
1577 tree partial = NULL_TREE;
1578 bool is_and = (innercode == BIT_AND_EXPR);
1580 /* Check for boolean identities that don't require recursive examination
1581 of inner1/inner2:
1582 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1583 inner1 AND (inner1 OR inner2) => inner1
1584 !inner1 AND (inner1 AND inner2) => false
1585 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1586 Likewise for similar cases involving inner2. */
1587 if (inner1 == true_test_var)
1588 return (is_and ? var : inner1);
1589 else if (inner2 == true_test_var)
1590 return (is_and ? var : inner2);
1591 else if (inner1 == false_test_var)
1592 return (is_and
1593 ? boolean_false_node
1594 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1595 else if (inner2 == false_test_var)
1596 return (is_and
1597 ? boolean_false_node
1598 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1600 /* Next, redistribute/reassociate the AND across the inner tests.
1601 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1602 if (TREE_CODE (inner1) == SSA_NAME
1603 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1604 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1605 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1606 gimple_assign_rhs1 (s),
1607 gimple_assign_rhs2 (s),
1608 code2, op2a, op2b)))
1610 /* Handle the AND case, where we are reassociating:
1611 (inner1 AND inner2) AND (op2a code2 op2b)
1612 => (t AND inner2)
1613 If the partial result t is a constant, we win. Otherwise
1614 continue on to try reassociating with the other inner test. */
1615 if (is_and)
1617 if (integer_onep (t))
1618 return inner2;
1619 else if (integer_zerop (t))
1620 return boolean_false_node;
1623 /* Handle the OR case, where we are redistributing:
1624 (inner1 OR inner2) AND (op2a code2 op2b)
1625 => (t OR (inner2 AND (op2a code2 op2b))) */
1626 else if (integer_onep (t))
1627 return boolean_true_node;
1629 /* Save partial result for later. */
1630 partial = t;
1633 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1634 if (TREE_CODE (inner2) == SSA_NAME
1635 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1636 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1637 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1638 gimple_assign_rhs1 (s),
1639 gimple_assign_rhs2 (s),
1640 code2, op2a, op2b)))
1642 /* Handle the AND case, where we are reassociating:
1643 (inner1 AND inner2) AND (op2a code2 op2b)
1644 => (inner1 AND t) */
1645 if (is_and)
1647 if (integer_onep (t))
1648 return inner1;
1649 else if (integer_zerop (t))
1650 return boolean_false_node;
1651 /* If both are the same, we can apply the identity
1652 (x AND x) == x. */
1653 else if (partial && same_bool_result_p (t, partial))
1654 return t;
1657 /* Handle the OR case. where we are redistributing:
1658 (inner1 OR inner2) AND (op2a code2 op2b)
1659 => (t OR (inner1 AND (op2a code2 op2b)))
1660 => (t OR partial) */
1661 else
1663 if (integer_onep (t))
1664 return boolean_true_node;
1665 else if (partial)
1667 /* We already got a simplification for the other
1668 operand to the redistributed OR expression. The
1669 interesting case is when at least one is false.
1670 Or, if both are the same, we can apply the identity
1671 (x OR x) == x. */
1672 if (integer_zerop (partial))
1673 return t;
1674 else if (integer_zerop (t))
1675 return partial;
1676 else if (same_bool_result_p (t, partial))
1677 return t;
1682 return NULL_TREE;
1685 /* Try to simplify the AND of two comparisons defined by
1686 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1687 If this can be done without constructing an intermediate value,
1688 return the resulting tree; otherwise NULL_TREE is returned.
1689 This function is deliberately asymmetric as it recurses on SSA_DEFs
1690 in the first comparison but not the second. */
1692 static tree
1693 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1694 enum tree_code code2, tree op2a, tree op2b)
1696 tree truth_type = boolean_type_node;
1697 if (TREE_CODE (TREE_TYPE (op1a)) == VECTOR_TYPE)
1699 tree vec_type = TREE_TYPE (op1a);
1700 tree elem = lang_hooks.types.type_for_size
1701 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (vec_type))), 0);
1702 truth_type = build_opaque_vector_type (elem,
1703 TYPE_VECTOR_SUBPARTS (vec_type));
1706 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1707 if (operand_equal_p (op1a, op2a, 0)
1708 && operand_equal_p (op1b, op2b, 0))
1710 /* Result will be either NULL_TREE, or a combined comparison. */
1711 tree t = combine_comparisons (UNKNOWN_LOCATION,
1712 TRUTH_ANDIF_EXPR, code1, code2,
1713 truth_type, op1a, op1b);
1714 if (t)
1715 return t;
1718 /* Likewise the swapped case of the above. */
1719 if (operand_equal_p (op1a, op2b, 0)
1720 && operand_equal_p (op1b, op2a, 0))
1722 /* Result will be either NULL_TREE, or a combined comparison. */
1723 tree t = combine_comparisons (UNKNOWN_LOCATION,
1724 TRUTH_ANDIF_EXPR, code1,
1725 swap_tree_comparison (code2),
1726 truth_type, op1a, op1b);
1727 if (t)
1728 return t;
1731 /* If both comparisons are of the same value against constants, we might
1732 be able to merge them. */
1733 if (operand_equal_p (op1a, op2a, 0)
1734 && TREE_CODE (op1b) == INTEGER_CST
1735 && TREE_CODE (op2b) == INTEGER_CST)
1737 int cmp = tree_int_cst_compare (op1b, op2b);
1739 /* If we have (op1a == op1b), we should either be able to
1740 return that or FALSE, depending on whether the constant op1b
1741 also satisfies the other comparison against op2b. */
1742 if (code1 == EQ_EXPR)
1744 bool done = true;
1745 bool val;
1746 switch (code2)
1748 case EQ_EXPR: val = (cmp == 0); break;
1749 case NE_EXPR: val = (cmp != 0); break;
1750 case LT_EXPR: val = (cmp < 0); break;
1751 case GT_EXPR: val = (cmp > 0); break;
1752 case LE_EXPR: val = (cmp <= 0); break;
1753 case GE_EXPR: val = (cmp >= 0); break;
1754 default: done = false;
1756 if (done)
1758 if (val)
1759 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1760 else
1761 return boolean_false_node;
1764 /* Likewise if the second comparison is an == comparison. */
1765 else if (code2 == EQ_EXPR)
1767 bool done = true;
1768 bool val;
1769 switch (code1)
1771 case EQ_EXPR: val = (cmp == 0); break;
1772 case NE_EXPR: val = (cmp != 0); break;
1773 case LT_EXPR: val = (cmp > 0); break;
1774 case GT_EXPR: val = (cmp < 0); break;
1775 case LE_EXPR: val = (cmp >= 0); break;
1776 case GE_EXPR: val = (cmp <= 0); break;
1777 default: done = false;
1779 if (done)
1781 if (val)
1782 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1783 else
1784 return boolean_false_node;
1788 /* Same business with inequality tests. */
1789 else if (code1 == NE_EXPR)
1791 bool val;
1792 switch (code2)
1794 case EQ_EXPR: val = (cmp != 0); break;
1795 case NE_EXPR: val = (cmp == 0); break;
1796 case LT_EXPR: val = (cmp >= 0); break;
1797 case GT_EXPR: val = (cmp <= 0); break;
1798 case LE_EXPR: val = (cmp > 0); break;
1799 case GE_EXPR: val = (cmp < 0); break;
1800 default:
1801 val = false;
1803 if (val)
1804 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1806 else if (code2 == NE_EXPR)
1808 bool val;
1809 switch (code1)
1811 case EQ_EXPR: val = (cmp == 0); break;
1812 case NE_EXPR: val = (cmp != 0); break;
1813 case LT_EXPR: val = (cmp <= 0); break;
1814 case GT_EXPR: val = (cmp >= 0); break;
1815 case LE_EXPR: val = (cmp < 0); break;
1816 case GE_EXPR: val = (cmp > 0); break;
1817 default:
1818 val = false;
1820 if (val)
1821 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1824 /* Chose the more restrictive of two < or <= comparisons. */
1825 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1826 && (code2 == LT_EXPR || code2 == LE_EXPR))
1828 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1829 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1830 else
1831 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1834 /* Likewise chose the more restrictive of two > or >= comparisons. */
1835 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1836 && (code2 == GT_EXPR || code2 == GE_EXPR))
1838 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1839 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1840 else
1841 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1844 /* Check for singleton ranges. */
1845 else if (cmp == 0
1846 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1847 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1848 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1850 /* Check for disjoint ranges. */
1851 else if (cmp <= 0
1852 && (code1 == LT_EXPR || code1 == LE_EXPR)
1853 && (code2 == GT_EXPR || code2 == GE_EXPR))
1854 return boolean_false_node;
1855 else if (cmp >= 0
1856 && (code1 == GT_EXPR || code1 == GE_EXPR)
1857 && (code2 == LT_EXPR || code2 == LE_EXPR))
1858 return boolean_false_node;
1861 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1862 NAME's definition is a truth value. See if there are any simplifications
1863 that can be done against the NAME's definition. */
1864 if (TREE_CODE (op1a) == SSA_NAME
1865 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1866 && (integer_zerop (op1b) || integer_onep (op1b)))
1868 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1869 || (code1 == NE_EXPR && integer_onep (op1b)));
1870 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1871 switch (gimple_code (stmt))
1873 case GIMPLE_ASSIGN:
1874 /* Try to simplify by copy-propagating the definition. */
1875 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1877 case GIMPLE_PHI:
1878 /* If every argument to the PHI produces the same result when
1879 ANDed with the second comparison, we win.
1880 Do not do this unless the type is bool since we need a bool
1881 result here anyway. */
1882 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1884 tree result = NULL_TREE;
1885 unsigned i;
1886 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1888 tree arg = gimple_phi_arg_def (stmt, i);
1890 /* If this PHI has itself as an argument, ignore it.
1891 If all the other args produce the same result,
1892 we're still OK. */
1893 if (arg == gimple_phi_result (stmt))
1894 continue;
1895 else if (TREE_CODE (arg) == INTEGER_CST)
1897 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1899 if (!result)
1900 result = boolean_false_node;
1901 else if (!integer_zerop (result))
1902 return NULL_TREE;
1904 else if (!result)
1905 result = fold_build2 (code2, boolean_type_node,
1906 op2a, op2b);
1907 else if (!same_bool_comparison_p (result,
1908 code2, op2a, op2b))
1909 return NULL_TREE;
1911 else if (TREE_CODE (arg) == SSA_NAME
1912 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1914 tree temp;
1915 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1916 /* In simple cases we can look through PHI nodes,
1917 but we have to be careful with loops.
1918 See PR49073. */
1919 if (! dom_info_available_p (CDI_DOMINATORS)
1920 || gimple_bb (def_stmt) == gimple_bb (stmt)
1921 || dominated_by_p (CDI_DOMINATORS,
1922 gimple_bb (def_stmt),
1923 gimple_bb (stmt)))
1924 return NULL_TREE;
1925 temp = and_var_with_comparison (arg, invert, code2,
1926 op2a, op2b);
1927 if (!temp)
1928 return NULL_TREE;
1929 else if (!result)
1930 result = temp;
1931 else if (!same_bool_result_p (result, temp))
1932 return NULL_TREE;
1934 else
1935 return NULL_TREE;
1937 return result;
1940 default:
1941 break;
1944 return NULL_TREE;
1947 /* Try to simplify the AND of two comparisons, specified by
1948 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1949 If this can be simplified to a single expression (without requiring
1950 introducing more SSA variables to hold intermediate values),
1951 return the resulting tree. Otherwise return NULL_TREE.
1952 If the result expression is non-null, it has boolean type. */
1954 tree
1955 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1956 enum tree_code code2, tree op2a, tree op2b)
1958 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1959 if (t)
1960 return t;
1961 else
1962 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1965 /* Helper function for or_comparisons_1: try to simplify the OR of the
1966 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1967 If INVERT is true, invert the value of VAR before doing the OR.
1968 Return NULL_EXPR if we can't simplify this to a single expression. */
1970 static tree
1971 or_var_with_comparison (tree var, bool invert,
1972 enum tree_code code2, tree op2a, tree op2b)
1974 tree t;
1975 gimple stmt = SSA_NAME_DEF_STMT (var);
1977 /* We can only deal with variables whose definitions are assignments. */
1978 if (!is_gimple_assign (stmt))
1979 return NULL_TREE;
1981 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1982 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1983 Then we only have to consider the simpler non-inverted cases. */
1984 if (invert)
1985 t = and_var_with_comparison_1 (stmt,
1986 invert_tree_comparison (code2, false),
1987 op2a, op2b);
1988 else
1989 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
1990 return canonicalize_bool (t, invert);
1993 /* Try to simplify the OR of the ssa variable defined by the assignment
1994 STMT with the comparison specified by (OP2A CODE2 OP2B).
1995 Return NULL_EXPR if we can't simplify this to a single expression. */
1997 static tree
1998 or_var_with_comparison_1 (gimple stmt,
1999 enum tree_code code2, tree op2a, tree op2b)
2001 tree var = gimple_assign_lhs (stmt);
2002 tree true_test_var = NULL_TREE;
2003 tree false_test_var = NULL_TREE;
2004 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2006 /* Check for identities like (var OR (var != 0)) => true . */
2007 if (TREE_CODE (op2a) == SSA_NAME
2008 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2010 if ((code2 == NE_EXPR && integer_zerop (op2b))
2011 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2013 true_test_var = op2a;
2014 if (var == true_test_var)
2015 return var;
2017 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2018 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2020 false_test_var = op2a;
2021 if (var == false_test_var)
2022 return boolean_true_node;
2026 /* If the definition is a comparison, recurse on it. */
2027 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2029 tree t = or_comparisons_1 (innercode,
2030 gimple_assign_rhs1 (stmt),
2031 gimple_assign_rhs2 (stmt),
2032 code2,
2033 op2a,
2034 op2b);
2035 if (t)
2036 return t;
2039 /* If the definition is an AND or OR expression, we may be able to
2040 simplify by reassociating. */
2041 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2042 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2044 tree inner1 = gimple_assign_rhs1 (stmt);
2045 tree inner2 = gimple_assign_rhs2 (stmt);
2046 gimple s;
2047 tree t;
2048 tree partial = NULL_TREE;
2049 bool is_or = (innercode == BIT_IOR_EXPR);
2051 /* Check for boolean identities that don't require recursive examination
2052 of inner1/inner2:
2053 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2054 inner1 OR (inner1 AND inner2) => inner1
2055 !inner1 OR (inner1 OR inner2) => true
2056 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2058 if (inner1 == true_test_var)
2059 return (is_or ? var : inner1);
2060 else if (inner2 == true_test_var)
2061 return (is_or ? var : inner2);
2062 else if (inner1 == false_test_var)
2063 return (is_or
2064 ? boolean_true_node
2065 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2066 else if (inner2 == false_test_var)
2067 return (is_or
2068 ? boolean_true_node
2069 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2071 /* Next, redistribute/reassociate the OR across the inner tests.
2072 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2073 if (TREE_CODE (inner1) == SSA_NAME
2074 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2075 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2076 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2077 gimple_assign_rhs1 (s),
2078 gimple_assign_rhs2 (s),
2079 code2, op2a, op2b)))
2081 /* Handle the OR case, where we are reassociating:
2082 (inner1 OR inner2) OR (op2a code2 op2b)
2083 => (t OR inner2)
2084 If the partial result t is a constant, we win. Otherwise
2085 continue on to try reassociating with the other inner test. */
2086 if (is_or)
2088 if (integer_onep (t))
2089 return boolean_true_node;
2090 else if (integer_zerop (t))
2091 return inner2;
2094 /* Handle the AND case, where we are redistributing:
2095 (inner1 AND inner2) OR (op2a code2 op2b)
2096 => (t AND (inner2 OR (op2a code op2b))) */
2097 else if (integer_zerop (t))
2098 return boolean_false_node;
2100 /* Save partial result for later. */
2101 partial = t;
2104 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2105 if (TREE_CODE (inner2) == SSA_NAME
2106 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2107 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2108 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2109 gimple_assign_rhs1 (s),
2110 gimple_assign_rhs2 (s),
2111 code2, op2a, op2b)))
2113 /* Handle the OR case, where we are reassociating:
2114 (inner1 OR inner2) OR (op2a code2 op2b)
2115 => (inner1 OR t)
2116 => (t OR partial) */
2117 if (is_or)
2119 if (integer_zerop (t))
2120 return inner1;
2121 else if (integer_onep (t))
2122 return boolean_true_node;
2123 /* If both are the same, we can apply the identity
2124 (x OR x) == x. */
2125 else if (partial && same_bool_result_p (t, partial))
2126 return t;
2129 /* Handle the AND case, where we are redistributing:
2130 (inner1 AND inner2) OR (op2a code2 op2b)
2131 => (t AND (inner1 OR (op2a code2 op2b)))
2132 => (t AND partial) */
2133 else
2135 if (integer_zerop (t))
2136 return boolean_false_node;
2137 else if (partial)
2139 /* We already got a simplification for the other
2140 operand to the redistributed AND expression. The
2141 interesting case is when at least one is true.
2142 Or, if both are the same, we can apply the identity
2143 (x AND x) == x. */
2144 if (integer_onep (partial))
2145 return t;
2146 else if (integer_onep (t))
2147 return partial;
2148 else if (same_bool_result_p (t, partial))
2149 return t;
2154 return NULL_TREE;
2157 /* Try to simplify the OR of two comparisons defined by
2158 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2159 If this can be done without constructing an intermediate value,
2160 return the resulting tree; otherwise NULL_TREE is returned.
2161 This function is deliberately asymmetric as it recurses on SSA_DEFs
2162 in the first comparison but not the second. */
2164 static tree
2165 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2166 enum tree_code code2, tree op2a, tree op2b)
2168 tree truth_type = boolean_type_node;
2169 if (TREE_CODE (TREE_TYPE (op1a)) == VECTOR_TYPE)
2171 tree vec_type = TREE_TYPE (op1a);
2172 tree elem = lang_hooks.types.type_for_size
2173 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (vec_type))), 0);
2174 truth_type = build_opaque_vector_type (elem,
2175 TYPE_VECTOR_SUBPARTS (vec_type));
2178 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2179 if (operand_equal_p (op1a, op2a, 0)
2180 && operand_equal_p (op1b, op2b, 0))
2182 /* Result will be either NULL_TREE, or a combined comparison. */
2183 tree t = combine_comparisons (UNKNOWN_LOCATION,
2184 TRUTH_ORIF_EXPR, code1, code2,
2185 truth_type, op1a, op1b);
2186 if (t)
2187 return t;
2190 /* Likewise the swapped case of the above. */
2191 if (operand_equal_p (op1a, op2b, 0)
2192 && operand_equal_p (op1b, op2a, 0))
2194 /* Result will be either NULL_TREE, or a combined comparison. */
2195 tree t = combine_comparisons (UNKNOWN_LOCATION,
2196 TRUTH_ORIF_EXPR, code1,
2197 swap_tree_comparison (code2),
2198 truth_type, op1a, op1b);
2199 if (t)
2200 return t;
2203 /* If both comparisons are of the same value against constants, we might
2204 be able to merge them. */
2205 if (operand_equal_p (op1a, op2a, 0)
2206 && TREE_CODE (op1b) == INTEGER_CST
2207 && TREE_CODE (op2b) == INTEGER_CST)
2209 int cmp = tree_int_cst_compare (op1b, op2b);
2211 /* If we have (op1a != op1b), we should either be able to
2212 return that or TRUE, depending on whether the constant op1b
2213 also satisfies the other comparison against op2b. */
2214 if (code1 == NE_EXPR)
2216 bool done = true;
2217 bool val;
2218 switch (code2)
2220 case EQ_EXPR: val = (cmp == 0); break;
2221 case NE_EXPR: val = (cmp != 0); break;
2222 case LT_EXPR: val = (cmp < 0); break;
2223 case GT_EXPR: val = (cmp > 0); break;
2224 case LE_EXPR: val = (cmp <= 0); break;
2225 case GE_EXPR: val = (cmp >= 0); break;
2226 default: done = false;
2228 if (done)
2230 if (val)
2231 return boolean_true_node;
2232 else
2233 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2236 /* Likewise if the second comparison is a != comparison. */
2237 else if (code2 == NE_EXPR)
2239 bool done = true;
2240 bool val;
2241 switch (code1)
2243 case EQ_EXPR: val = (cmp == 0); break;
2244 case NE_EXPR: val = (cmp != 0); break;
2245 case LT_EXPR: val = (cmp > 0); break;
2246 case GT_EXPR: val = (cmp < 0); break;
2247 case LE_EXPR: val = (cmp >= 0); break;
2248 case GE_EXPR: val = (cmp <= 0); break;
2249 default: done = false;
2251 if (done)
2253 if (val)
2254 return boolean_true_node;
2255 else
2256 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2260 /* See if an equality test is redundant with the other comparison. */
2261 else if (code1 == EQ_EXPR)
2263 bool val;
2264 switch (code2)
2266 case EQ_EXPR: val = (cmp == 0); break;
2267 case NE_EXPR: val = (cmp != 0); break;
2268 case LT_EXPR: val = (cmp < 0); break;
2269 case GT_EXPR: val = (cmp > 0); break;
2270 case LE_EXPR: val = (cmp <= 0); break;
2271 case GE_EXPR: val = (cmp >= 0); break;
2272 default:
2273 val = false;
2275 if (val)
2276 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2278 else if (code2 == EQ_EXPR)
2280 bool val;
2281 switch (code1)
2283 case EQ_EXPR: val = (cmp == 0); break;
2284 case NE_EXPR: val = (cmp != 0); break;
2285 case LT_EXPR: val = (cmp > 0); break;
2286 case GT_EXPR: val = (cmp < 0); break;
2287 case LE_EXPR: val = (cmp >= 0); break;
2288 case GE_EXPR: val = (cmp <= 0); break;
2289 default:
2290 val = false;
2292 if (val)
2293 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2296 /* Chose the less restrictive of two < or <= comparisons. */
2297 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2298 && (code2 == LT_EXPR || code2 == LE_EXPR))
2300 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2301 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2302 else
2303 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2306 /* Likewise chose the less restrictive of two > or >= comparisons. */
2307 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2308 && (code2 == GT_EXPR || code2 == GE_EXPR))
2310 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2311 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2312 else
2313 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2316 /* Check for singleton ranges. */
2317 else if (cmp == 0
2318 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2319 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2320 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2322 /* Check for less/greater pairs that don't restrict the range at all. */
2323 else if (cmp >= 0
2324 && (code1 == LT_EXPR || code1 == LE_EXPR)
2325 && (code2 == GT_EXPR || code2 == GE_EXPR))
2326 return boolean_true_node;
2327 else if (cmp <= 0
2328 && (code1 == GT_EXPR || code1 == GE_EXPR)
2329 && (code2 == LT_EXPR || code2 == LE_EXPR))
2330 return boolean_true_node;
2333 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2334 NAME's definition is a truth value. See if there are any simplifications
2335 that can be done against the NAME's definition. */
2336 if (TREE_CODE (op1a) == SSA_NAME
2337 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2338 && (integer_zerop (op1b) || integer_onep (op1b)))
2340 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2341 || (code1 == NE_EXPR && integer_onep (op1b)));
2342 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2343 switch (gimple_code (stmt))
2345 case GIMPLE_ASSIGN:
2346 /* Try to simplify by copy-propagating the definition. */
2347 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2349 case GIMPLE_PHI:
2350 /* If every argument to the PHI produces the same result when
2351 ORed with the second comparison, we win.
2352 Do not do this unless the type is bool since we need a bool
2353 result here anyway. */
2354 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2356 tree result = NULL_TREE;
2357 unsigned i;
2358 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2360 tree arg = gimple_phi_arg_def (stmt, i);
2362 /* If this PHI has itself as an argument, ignore it.
2363 If all the other args produce the same result,
2364 we're still OK. */
2365 if (arg == gimple_phi_result (stmt))
2366 continue;
2367 else if (TREE_CODE (arg) == INTEGER_CST)
2369 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2371 if (!result)
2372 result = boolean_true_node;
2373 else if (!integer_onep (result))
2374 return NULL_TREE;
2376 else if (!result)
2377 result = fold_build2 (code2, boolean_type_node,
2378 op2a, op2b);
2379 else if (!same_bool_comparison_p (result,
2380 code2, op2a, op2b))
2381 return NULL_TREE;
2383 else if (TREE_CODE (arg) == SSA_NAME
2384 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2386 tree temp;
2387 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2388 /* In simple cases we can look through PHI nodes,
2389 but we have to be careful with loops.
2390 See PR49073. */
2391 if (! dom_info_available_p (CDI_DOMINATORS)
2392 || gimple_bb (def_stmt) == gimple_bb (stmt)
2393 || dominated_by_p (CDI_DOMINATORS,
2394 gimple_bb (def_stmt),
2395 gimple_bb (stmt)))
2396 return NULL_TREE;
2397 temp = or_var_with_comparison (arg, invert, code2,
2398 op2a, op2b);
2399 if (!temp)
2400 return NULL_TREE;
2401 else if (!result)
2402 result = temp;
2403 else if (!same_bool_result_p (result, temp))
2404 return NULL_TREE;
2406 else
2407 return NULL_TREE;
2409 return result;
2412 default:
2413 break;
2416 return NULL_TREE;
2419 /* Try to simplify the OR of two comparisons, specified by
2420 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2421 If this can be simplified to a single expression (without requiring
2422 introducing more SSA variables to hold intermediate values),
2423 return the resulting tree. Otherwise return NULL_TREE.
2424 If the result expression is non-null, it has boolean type. */
2426 tree
2427 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2428 enum tree_code code2, tree op2a, tree op2b)
2430 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2431 if (t)
2432 return t;
2433 else
2434 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2438 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2440 Either NULL_TREE, a simplified but non-constant or a constant
2441 is returned.
2443 ??? This should go into a gimple-fold-inline.h file to be eventually
2444 privatized with the single valueize function used in the various TUs
2445 to avoid the indirect function call overhead. */
2447 tree
2448 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2450 location_t loc = gimple_location (stmt);
2451 switch (gimple_code (stmt))
2453 case GIMPLE_ASSIGN:
2455 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2457 switch (get_gimple_rhs_class (subcode))
2459 case GIMPLE_SINGLE_RHS:
2461 tree rhs = gimple_assign_rhs1 (stmt);
2462 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2464 if (TREE_CODE (rhs) == SSA_NAME)
2466 /* If the RHS is an SSA_NAME, return its known constant value,
2467 if any. */
2468 return (*valueize) (rhs);
2470 /* Handle propagating invariant addresses into address
2471 operations. */
2472 else if (TREE_CODE (rhs) == ADDR_EXPR
2473 && !is_gimple_min_invariant (rhs))
2475 HOST_WIDE_INT offset = 0;
2476 tree base;
2477 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2478 &offset,
2479 valueize);
2480 if (base
2481 && (CONSTANT_CLASS_P (base)
2482 || decl_address_invariant_p (base)))
2483 return build_invariant_address (TREE_TYPE (rhs),
2484 base, offset);
2486 else if (TREE_CODE (rhs) == CONSTRUCTOR
2487 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2488 && (CONSTRUCTOR_NELTS (rhs)
2489 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2491 unsigned i;
2492 tree val, *vec;
2494 vec = XALLOCAVEC (tree,
2495 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
2496 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2498 val = (*valueize) (val);
2499 if (TREE_CODE (val) == INTEGER_CST
2500 || TREE_CODE (val) == REAL_CST
2501 || TREE_CODE (val) == FIXED_CST)
2502 vec[i] = val;
2503 else
2504 return NULL_TREE;
2507 return build_vector (TREE_TYPE (rhs), vec);
2510 if (kind == tcc_reference)
2512 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2513 || TREE_CODE (rhs) == REALPART_EXPR
2514 || TREE_CODE (rhs) == IMAGPART_EXPR)
2515 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2517 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2518 return fold_unary_loc (EXPR_LOCATION (rhs),
2519 TREE_CODE (rhs),
2520 TREE_TYPE (rhs), val);
2522 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2523 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2525 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2526 return fold_ternary_loc (EXPR_LOCATION (rhs),
2527 TREE_CODE (rhs),
2528 TREE_TYPE (rhs), val,
2529 TREE_OPERAND (rhs, 1),
2530 TREE_OPERAND (rhs, 2));
2532 else if (TREE_CODE (rhs) == MEM_REF
2533 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2535 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2536 if (TREE_CODE (val) == ADDR_EXPR
2537 && is_gimple_min_invariant (val))
2539 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2540 unshare_expr (val),
2541 TREE_OPERAND (rhs, 1));
2542 if (tem)
2543 rhs = tem;
2546 return fold_const_aggregate_ref_1 (rhs, valueize);
2548 else if (kind == tcc_declaration)
2549 return get_symbol_constant_value (rhs);
2550 return rhs;
2553 case GIMPLE_UNARY_RHS:
2555 /* Handle unary operators that can appear in GIMPLE form.
2556 Note that we know the single operand must be a constant,
2557 so this should almost always return a simplified RHS. */
2558 tree lhs = gimple_assign_lhs (stmt);
2559 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2561 /* Conversions are useless for CCP purposes if they are
2562 value-preserving. Thus the restrictions that
2563 useless_type_conversion_p places for restrict qualification
2564 of pointer types should not apply here.
2565 Substitution later will only substitute to allowed places. */
2566 if (CONVERT_EXPR_CODE_P (subcode)
2567 && POINTER_TYPE_P (TREE_TYPE (lhs))
2568 && POINTER_TYPE_P (TREE_TYPE (op0))
2569 && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2570 == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2571 && TYPE_MODE (TREE_TYPE (lhs))
2572 == TYPE_MODE (TREE_TYPE (op0)))
2573 return op0;
2575 return
2576 fold_unary_ignore_overflow_loc (loc, subcode,
2577 gimple_expr_type (stmt), op0);
2580 case GIMPLE_BINARY_RHS:
2582 /* Handle binary operators that can appear in GIMPLE form. */
2583 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2584 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2586 /* Translate &x + CST into an invariant form suitable for
2587 further propagation. */
2588 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2589 && TREE_CODE (op0) == ADDR_EXPR
2590 && TREE_CODE (op1) == INTEGER_CST)
2592 tree off = fold_convert (ptr_type_node, op1);
2593 return build_fold_addr_expr_loc
2594 (loc,
2595 fold_build2 (MEM_REF,
2596 TREE_TYPE (TREE_TYPE (op0)),
2597 unshare_expr (op0), off));
2600 return fold_binary_loc (loc, subcode,
2601 gimple_expr_type (stmt), op0, op1);
2604 case GIMPLE_TERNARY_RHS:
2606 /* Handle ternary operators that can appear in GIMPLE form. */
2607 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2608 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2609 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2611 /* Fold embedded expressions in ternary codes. */
2612 if ((subcode == COND_EXPR
2613 || subcode == VEC_COND_EXPR)
2614 && COMPARISON_CLASS_P (op0))
2616 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2617 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2618 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2619 TREE_TYPE (op0), op00, op01);
2620 if (tem)
2621 op0 = tem;
2624 return fold_ternary_loc (loc, subcode,
2625 gimple_expr_type (stmt), op0, op1, op2);
2628 default:
2629 gcc_unreachable ();
2633 case GIMPLE_CALL:
2635 tree fn;
2637 if (gimple_call_internal_p (stmt))
2638 /* No folding yet for these functions. */
2639 return NULL_TREE;
2641 fn = (*valueize) (gimple_call_fn (stmt));
2642 if (TREE_CODE (fn) == ADDR_EXPR
2643 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2644 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2646 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2647 tree call, retval;
2648 unsigned i;
2649 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2650 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2651 call = build_call_array_loc (loc,
2652 gimple_call_return_type (stmt),
2653 fn, gimple_call_num_args (stmt), args);
2654 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2655 if (retval)
2656 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2657 STRIP_NOPS (retval);
2658 return retval;
2660 return NULL_TREE;
2663 default:
2664 return NULL_TREE;
2668 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2669 Returns NULL_TREE if folding to a constant is not possible, otherwise
2670 returns a constant according to is_gimple_min_invariant. */
2672 tree
2673 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2675 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2676 if (res && is_gimple_min_invariant (res))
2677 return res;
2678 return NULL_TREE;
2682 /* The following set of functions are supposed to fold references using
2683 their constant initializers. */
2685 static tree fold_ctor_reference (tree type, tree ctor,
2686 unsigned HOST_WIDE_INT offset,
2687 unsigned HOST_WIDE_INT size, tree);
2689 /* See if we can find constructor defining value of BASE.
2690 When we know the consructor with constant offset (such as
2691 base is array[40] and we do know constructor of array), then
2692 BIT_OFFSET is adjusted accordingly.
2694 As a special case, return error_mark_node when constructor
2695 is not explicitly available, but it is known to be zero
2696 such as 'static const int a;'. */
2697 static tree
2698 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2699 tree (*valueize)(tree))
2701 HOST_WIDE_INT bit_offset2, size, max_size;
2702 if (TREE_CODE (base) == MEM_REF)
2704 if (!integer_zerop (TREE_OPERAND (base, 1)))
2706 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2707 return NULL_TREE;
2708 *bit_offset += (mem_ref_offset (base).low
2709 * BITS_PER_UNIT);
2712 if (valueize
2713 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2714 base = valueize (TREE_OPERAND (base, 0));
2715 if (!base || TREE_CODE (base) != ADDR_EXPR)
2716 return NULL_TREE;
2717 base = TREE_OPERAND (base, 0);
2720 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2721 DECL_INITIAL. If BASE is a nested reference into another
2722 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2723 the inner reference. */
2724 switch (TREE_CODE (base))
2726 case VAR_DECL:
2727 if (!const_value_known_p (base))
2728 return NULL_TREE;
2730 /* Fallthru. */
2731 case CONST_DECL:
2732 if (!DECL_INITIAL (base)
2733 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2734 return error_mark_node;
2735 /* Do not return an error_mark_node DECL_INITIAL. LTO uses this
2736 as special marker (_not_ zero ...) for its own purposes. */
2737 if (DECL_INITIAL (base) == error_mark_node)
2738 return NULL_TREE;
2739 return DECL_INITIAL (base);
2741 case ARRAY_REF:
2742 case COMPONENT_REF:
2743 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2744 if (max_size == -1 || size != max_size)
2745 return NULL_TREE;
2746 *bit_offset += bit_offset2;
2747 return get_base_constructor (base, bit_offset, valueize);
2749 case STRING_CST:
2750 case CONSTRUCTOR:
2751 return base;
2753 default:
2754 return NULL_TREE;
2758 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2759 to the memory at bit OFFSET.
2761 We do only simple job of folding byte accesses. */
2763 static tree
2764 fold_string_cst_ctor_reference (tree type, tree ctor,
2765 unsigned HOST_WIDE_INT offset,
2766 unsigned HOST_WIDE_INT size)
2768 if (INTEGRAL_TYPE_P (type)
2769 && (TYPE_MODE (type)
2770 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2771 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2772 == MODE_INT)
2773 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2774 && size == BITS_PER_UNIT
2775 && !(offset % BITS_PER_UNIT))
2777 offset /= BITS_PER_UNIT;
2778 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2779 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2780 [offset]));
2781 /* Folding
2782 const char a[20]="hello";
2783 return a[10];
2785 might lead to offset greater than string length. In this case we
2786 know value is either initialized to 0 or out of bounds. Return 0
2787 in both cases. */
2788 return build_zero_cst (type);
2790 return NULL_TREE;
2793 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2794 SIZE to the memory at bit OFFSET. */
2796 static tree
2797 fold_array_ctor_reference (tree type, tree ctor,
2798 unsigned HOST_WIDE_INT offset,
2799 unsigned HOST_WIDE_INT size,
2800 tree from_decl)
2802 unsigned HOST_WIDE_INT cnt;
2803 tree cfield, cval;
2804 double_int low_bound, elt_size;
2805 double_int index, max_index;
2806 double_int access_index;
2807 tree domain_type = NULL_TREE, index_type = NULL_TREE;
2808 HOST_WIDE_INT inner_offset;
2810 /* Compute low bound and elt size. */
2811 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2812 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2813 if (domain_type && TYPE_MIN_VALUE (domain_type))
2815 /* Static constructors for variably sized objects makes no sense. */
2816 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2817 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
2818 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2820 else
2821 low_bound = double_int_zero;
2822 /* Static constructors for variably sized objects makes no sense. */
2823 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2824 == INTEGER_CST);
2825 elt_size =
2826 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2829 /* We can handle only constantly sized accesses that are known to not
2830 be larger than size of array element. */
2831 if (!TYPE_SIZE_UNIT (type)
2832 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2833 || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type))))
2834 return NULL_TREE;
2836 /* Compute the array index we look for. */
2837 access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
2838 .udiv (elt_size, TRUNC_DIV_EXPR);
2839 access_index += low_bound;
2840 if (index_type)
2841 access_index = access_index.ext (TYPE_PRECISION (index_type),
2842 TYPE_UNSIGNED (index_type));
2844 /* And offset within the access. */
2845 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
2847 /* See if the array field is large enough to span whole access. We do not
2848 care to fold accesses spanning multiple array indexes. */
2849 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
2850 return NULL_TREE;
2852 index = low_bound - double_int_one;
2853 if (index_type)
2854 index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));
2856 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2858 /* Array constructor might explicitely set index, or specify range
2859 or leave index NULL meaning that it is next index after previous
2860 one. */
2861 if (cfield)
2863 if (TREE_CODE (cfield) == INTEGER_CST)
2864 max_index = index = tree_to_double_int (cfield);
2865 else
2867 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2868 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2869 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2872 else
2874 index += double_int_one;
2875 if (index_type)
2876 index = index.ext (TYPE_PRECISION (index_type),
2877 TYPE_UNSIGNED (index_type));
2878 max_index = index;
2881 /* Do we have match? */
2882 if (access_index.cmp (index, 1) >= 0
2883 && access_index.cmp (max_index, 1) <= 0)
2884 return fold_ctor_reference (type, cval, inner_offset, size,
2885 from_decl);
2887 /* When memory is not explicitely mentioned in constructor,
2888 it is 0 (or out of range). */
2889 return build_zero_cst (type);
2892 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2893 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2895 static tree
2896 fold_nonarray_ctor_reference (tree type, tree ctor,
2897 unsigned HOST_WIDE_INT offset,
2898 unsigned HOST_WIDE_INT size,
2899 tree from_decl)
2901 unsigned HOST_WIDE_INT cnt;
2902 tree cfield, cval;
2904 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2905 cval)
2907 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2908 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2909 tree field_size = DECL_SIZE (cfield);
2910 double_int bitoffset;
2911 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2912 double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
2913 double_int bitoffset_end, access_end;
2915 /* Variable sized objects in static constructors makes no sense,
2916 but field_size can be NULL for flexible array members. */
2917 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2918 && TREE_CODE (byte_offset) == INTEGER_CST
2919 && (field_size != NULL_TREE
2920 ? TREE_CODE (field_size) == INTEGER_CST
2921 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2923 /* Compute bit offset of the field. */
2924 bitoffset = tree_to_double_int (field_offset)
2925 + byte_offset_cst * bits_per_unit_cst;
2926 /* Compute bit offset where the field ends. */
2927 if (field_size != NULL_TREE)
2928 bitoffset_end = bitoffset + tree_to_double_int (field_size);
2929 else
2930 bitoffset_end = double_int_zero;
2932 access_end = double_int::from_uhwi (offset)
2933 + double_int::from_uhwi (size);
2935 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2936 [BITOFFSET, BITOFFSET_END)? */
2937 if (access_end.cmp (bitoffset, 0) > 0
2938 && (field_size == NULL_TREE
2939 || double_int::from_uhwi (offset).slt (bitoffset_end)))
2941 double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
2942 /* We do have overlap. Now see if field is large enough to
2943 cover the access. Give up for accesses spanning multiple
2944 fields. */
2945 if (access_end.cmp (bitoffset_end, 0) > 0)
2946 return NULL_TREE;
2947 if (double_int::from_uhwi (offset).slt (bitoffset))
2948 return NULL_TREE;
2949 return fold_ctor_reference (type, cval,
2950 inner_offset.to_uhwi (), size,
2951 from_decl);
2954 /* When memory is not explicitely mentioned in constructor, it is 0. */
2955 return build_zero_cst (type);
2958 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2959 to the memory at bit OFFSET. */
2961 static tree
2962 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2963 unsigned HOST_WIDE_INT size, tree from_decl)
2965 tree ret;
2967 /* We found the field with exact match. */
2968 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2969 && !offset)
2970 return canonicalize_constructor_val (ctor, from_decl);
2972 /* We are at the end of walk, see if we can view convert the
2973 result. */
2974 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2975 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2976 && operand_equal_p (TYPE_SIZE (type),
2977 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2979 ret = canonicalize_constructor_val (ctor, from_decl);
2980 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2981 if (ret)
2982 STRIP_NOPS (ret);
2983 return ret;
2985 if (TREE_CODE (ctor) == STRING_CST)
2986 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2987 if (TREE_CODE (ctor) == CONSTRUCTOR)
2990 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2991 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
2992 return fold_array_ctor_reference (type, ctor, offset, size,
2993 from_decl);
2994 else
2995 return fold_nonarray_ctor_reference (type, ctor, offset, size,
2996 from_decl);
2999 return NULL_TREE;
3002 /* Return the tree representing the element referenced by T if T is an
3003 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
3004 names using VALUEIZE. Return NULL_TREE otherwise. */
3006 tree
3007 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
3009 tree ctor, idx, base;
3010 HOST_WIDE_INT offset, size, max_size;
3011 tree tem;
3013 if (TREE_THIS_VOLATILE (t))
3014 return NULL_TREE;
3016 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3017 return get_symbol_constant_value (t);
3019 tem = fold_read_from_constant_string (t);
3020 if (tem)
3021 return tem;
3023 switch (TREE_CODE (t))
3025 case ARRAY_REF:
3026 case ARRAY_RANGE_REF:
3027 /* Constant indexes are handled well by get_base_constructor.
3028 Only special case variable offsets.
3029 FIXME: This code can't handle nested references with variable indexes
3030 (they will be handled only by iteration of ccp). Perhaps we can bring
3031 get_ref_base_and_extent here and make it use a valueize callback. */
3032 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3033 && valueize
3034 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3035 && TREE_CODE (idx) == INTEGER_CST)
3037 tree low_bound, unit_size;
3038 double_int doffset;
3040 /* If the resulting bit-offset is constant, track it. */
3041 if ((low_bound = array_ref_low_bound (t),
3042 TREE_CODE (low_bound) == INTEGER_CST)
3043 && (unit_size = array_ref_element_size (t),
3044 host_integerp (unit_size, 1))
3045 && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
3046 .sext (TYPE_PRECISION (TREE_TYPE (idx))),
3047 doffset.fits_shwi ()))
3049 offset = doffset.to_shwi ();
3050 offset *= TREE_INT_CST_LOW (unit_size);
3051 offset *= BITS_PER_UNIT;
3053 base = TREE_OPERAND (t, 0);
3054 ctor = get_base_constructor (base, &offset, valueize);
3055 /* Empty constructor. Always fold to 0. */
3056 if (ctor == error_mark_node)
3057 return build_zero_cst (TREE_TYPE (t));
3058 /* Out of bound array access. Value is undefined,
3059 but don't fold. */
3060 if (offset < 0)
3061 return NULL_TREE;
3062 /* We can not determine ctor. */
3063 if (!ctor)
3064 return NULL_TREE;
3065 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3066 TREE_INT_CST_LOW (unit_size)
3067 * BITS_PER_UNIT,
3068 base);
3071 /* Fallthru. */
3073 case COMPONENT_REF:
3074 case BIT_FIELD_REF:
3075 case TARGET_MEM_REF:
3076 case MEM_REF:
3077 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3078 ctor = get_base_constructor (base, &offset, valueize);
3080 /* Empty constructor. Always fold to 0. */
3081 if (ctor == error_mark_node)
3082 return build_zero_cst (TREE_TYPE (t));
3083 /* We do not know precise address. */
3084 if (max_size == -1 || max_size != size)
3085 return NULL_TREE;
3086 /* We can not determine ctor. */
3087 if (!ctor)
3088 return NULL_TREE;
3090 /* Out of bound array access. Value is undefined, but don't fold. */
3091 if (offset < 0)
3092 return NULL_TREE;
3094 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
3095 base);
3097 case REALPART_EXPR:
3098 case IMAGPART_EXPR:
3100 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3101 if (c && TREE_CODE (c) == COMPLEX_CST)
3102 return fold_build1_loc (EXPR_LOCATION (t),
3103 TREE_CODE (t), TREE_TYPE (t), c);
3104 break;
3107 default:
3108 break;
3111 return NULL_TREE;
3114 tree
3115 fold_const_aggregate_ref (tree t)
3117 return fold_const_aggregate_ref_1 (t, NULL);
3120 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3121 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3122 KNOWN_BINFO carries the binfo describing the true type of
3123 OBJ_TYPE_REF_OBJECT(REF). */
3125 tree
3126 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3128 unsigned HOST_WIDE_INT offset, size;
3129 tree v, fn, vtable;
3131 vtable = v = BINFO_VTABLE (known_binfo);
3132 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
3133 if (!v)
3134 return NULL_TREE;
3136 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3138 offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3139 v = TREE_OPERAND (v, 0);
3141 else
3142 offset = 0;
3144 if (TREE_CODE (v) != ADDR_EXPR)
3145 return NULL_TREE;
3146 v = TREE_OPERAND (v, 0);
3148 if (TREE_CODE (v) != VAR_DECL
3149 || !DECL_VIRTUAL_P (v)
3150 || !DECL_INITIAL (v)
3151 || DECL_INITIAL (v) == error_mark_node)
3152 return NULL_TREE;
3153 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3154 size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
3155 offset += token * size;
3156 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
3157 offset, size, vtable);
3158 if (!fn || integer_zerop (fn))
3159 return NULL_TREE;
3160 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3161 || TREE_CODE (fn) == FDESC_EXPR);
3162 fn = TREE_OPERAND (fn, 0);
3163 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3165 /* When cgraph node is missing and function is not public, we cannot
3166 devirtualize. This can happen in WHOPR when the actual method
3167 ends up in other partition, because we found devirtualization
3168 possibility too late. */
3169 if (!can_refer_decl_in_current_unit_p (fn, vtable))
3170 return NULL_TREE;
3172 /* Make sure we create a cgraph node for functions we'll reference.
3173 They can be non-existent if the reference comes from an entry
3174 of an external vtable for example. */
3175 cgraph_get_create_node (fn);
3177 return fn;
3180 /* Return true iff VAL is a gimple expression that is known to be
3181 non-negative. Restricted to floating-point inputs. */
3183 bool
3184 gimple_val_nonnegative_real_p (tree val)
3186 gimple def_stmt;
3188 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3190 /* Use existing logic for non-gimple trees. */
3191 if (tree_expr_nonnegative_p (val))
3192 return true;
3194 if (TREE_CODE (val) != SSA_NAME)
3195 return false;
3197 /* Currently we look only at the immediately defining statement
3198 to make this determination, since recursion on defining
3199 statements of operands can lead to quadratic behavior in the
3200 worst case. This is expected to catch almost all occurrences
3201 in practice. It would be possible to implement limited-depth
3202 recursion if important cases are lost. Alternatively, passes
3203 that need this information (such as the pow/powi lowering code
3204 in the cse_sincos pass) could be revised to provide it through
3205 dataflow propagation. */
3207 def_stmt = SSA_NAME_DEF_STMT (val);
3209 if (is_gimple_assign (def_stmt))
3211 tree op0, op1;
3213 /* See fold-const.c:tree_expr_nonnegative_p for additional
3214 cases that could be handled with recursion. */
3216 switch (gimple_assign_rhs_code (def_stmt))
3218 case ABS_EXPR:
3219 /* Always true for floating-point operands. */
3220 return true;
3222 case MULT_EXPR:
3223 /* True if the two operands are identical (since we are
3224 restricted to floating-point inputs). */
3225 op0 = gimple_assign_rhs1 (def_stmt);
3226 op1 = gimple_assign_rhs2 (def_stmt);
3228 if (op0 == op1
3229 || operand_equal_p (op0, op1, 0))
3230 return true;
3232 default:
3233 return false;
3236 else if (is_gimple_call (def_stmt))
3238 tree fndecl = gimple_call_fndecl (def_stmt);
3239 if (fndecl
3240 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3242 tree arg1;
3244 switch (DECL_FUNCTION_CODE (fndecl))
3246 CASE_FLT_FN (BUILT_IN_ACOS):
3247 CASE_FLT_FN (BUILT_IN_ACOSH):
3248 CASE_FLT_FN (BUILT_IN_CABS):
3249 CASE_FLT_FN (BUILT_IN_COSH):
3250 CASE_FLT_FN (BUILT_IN_ERFC):
3251 CASE_FLT_FN (BUILT_IN_EXP):
3252 CASE_FLT_FN (BUILT_IN_EXP10):
3253 CASE_FLT_FN (BUILT_IN_EXP2):
3254 CASE_FLT_FN (BUILT_IN_FABS):
3255 CASE_FLT_FN (BUILT_IN_FDIM):
3256 CASE_FLT_FN (BUILT_IN_HYPOT):
3257 CASE_FLT_FN (BUILT_IN_POW10):
3258 return true;
3260 CASE_FLT_FN (BUILT_IN_SQRT):
3261 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3262 nonnegative inputs. */
3263 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3264 return true;
3266 break;
3268 CASE_FLT_FN (BUILT_IN_POWI):
3269 /* True if the second argument is an even integer. */
3270 arg1 = gimple_call_arg (def_stmt, 1);
3272 if (TREE_CODE (arg1) == INTEGER_CST
3273 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3274 return true;
3276 break;
3278 CASE_FLT_FN (BUILT_IN_POW):
3279 /* True if the second argument is an even integer-valued
3280 real. */
3281 arg1 = gimple_call_arg (def_stmt, 1);
3283 if (TREE_CODE (arg1) == REAL_CST)
3285 REAL_VALUE_TYPE c;
3286 HOST_WIDE_INT n;
3288 c = TREE_REAL_CST (arg1);
3289 n = real_to_integer (&c);
3291 if ((n & 1) == 0)
3293 REAL_VALUE_TYPE cint;
3294 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3295 if (real_identical (&c, &cint))
3296 return true;
3300 break;
3302 default:
3303 return false;
3308 return false;