gcc/gimple-fold.c

/* Statement simplification on GIMPLE.
   Copyright (C) 2010, 2011, 2012 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "dumpfile.h"
#include "tree-flow.h"
#include "tree-ssa-propagate.h"
#include "target.h"
#include "gimple-fold.h"

/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to another compilation unit.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to a reference
        to a method that was partitioned elsewhere.
        In this case we have a static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred to by external vtables that
        we devirtualize only during the final compilation stage.
        At this time we have already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */
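
/* Expository example (added for illustration; not in the original sources):
   for case 1 above, consider a C++ unit that only uses a class

       struct A { virtual int f (); };   // A::f defined and keyed elsewhere
       extern A a;

   Folding a virtual call through A's external vtable can expose a pointer
   to A::f even though this unit never emits a body for it, so the checks
   below must decide whether that symbol may be referenced.  */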

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node snode;

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || !DECL_EXTERNAL (from_decl)
      || (flag_ltrans
          && symtab_get_node (from_decl)->symbol.in_other_partition))
    return true;
  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;
  /* Weakrefs have somewhat confusing DECL_EXTERNAL flag set; they
     are always safe.  */
  if (DECL_EXTERNAL (decl)
      && lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && (!(snode = symtab_get_node (decl)) || !snode->symbol.in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also, while still gimplifying, all referred comdat functions will be
     produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with
     LTO it is possible that the vtable was kept public while the function
     itself was privatized.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;

  /* OK, we are seeing either a COMDAT or a static variable.  In this case we
     must check that the definition is still around so we can refer to it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have the function body and that we haven't yet
         decided to eliminate the offline copy of the function.  The second
         is important when devirtualization happens during the final
         compilation stage, when making a new reference no longer causes the
         callee to be compiled.  */
      if (!node || !node->analyzed || node->global.inlined_to)
        {
          gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
          return false;
        }
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->analyzed)
        {
          gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
          return false;
        }
    }
  return true;
}

/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
   it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */
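
/* Expository example (not part of the original sources): an initializer
   value such as "&a + 4", represented as POINTER_PLUS_EXPR (&a, 4), is
   rewritten below into the equivalent ADDR_EXPR &MEM[(void *)&a + 4B],
   a form that is_gimple_min_invariant accepts.  */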

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  STRIP_USELESS_TYPE_CONVERSION (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        ptr,
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base && TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
        {
          base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
          if (base)
            TREE_OPERAND (cval, 0) = base;
        }
      if (!base)
        return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
           || TREE_CODE (base) == FUNCTION_DECL)
          && !can_refer_decl_in_current_unit_p (base, from_decl))
        return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
        TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
        {
          /* Make sure we create a cgraph node for functions we'll reference.
             They can be non-existent if the reference comes from an entry
             of an external vtable for example.  */
          cgraph_get_create_node (base);
        }
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
    }
  return cval;
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */
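
/* Expository example (added for illustration): given

     static const int answer = 42;

   get_symbol_constant_value on ANSWER's decl returns the INTEGER_CST 42,
   and a const-qualified scalar that may not be overridden at link or run
   time but lacks an initializer yields build_zero_cst of its type.  */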

tree
get_symbol_constant_value (tree sym)
{
  if (const_value_known_p (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
        {
          val = canonicalize_constructor_val (val, sym);
          if (val && is_gimple_min_invariant (val))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
              || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
        return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}

/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */
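
/* Expository example (not in the original sources): after constant
   propagation produces the statement "tmp = *&x;", the zero-offset
   MEM_REF wrapped around the ADDR_EXPR is folded back below into the
   direct reference "tmp = x;".  */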

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
                           TREE_CODE (expr),
                           TREE_TYPE (expr),
                           TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0),
                             TREE_OPERAND (expr, 1),
                             TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
                              TREE_OPERAND (*t, 0),
                              TREE_OPERAND (*t, 1));
      if (tem)
        {
          TREE_THIS_VOLATILE (tem) = volatile_p;
          *t = tem;
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
          == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
          == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
         from the rhs to the lhs if is_lhs, but we don't have the
         rhs here to verify that.  Thus require strict type
         compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
                             TREE_TYPE (TREE_OPERAND
                                          (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
        return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
        {
          *t = tem;
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }

  return NULL_TREE;
}

/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
            if (tem
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
            else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                 && (CONSTRUCTOR_NELTS (rhs)
                     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (TREE_CODE (val) != INTEGER_CST
                  && TREE_CODE (val) != REAL_CST
                  && TREE_CODE (val) != FIXED_CST)
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return unshare_expr (get_symbol_constant_value (rhs));

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;

        return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
        if (result)
          {
            /* If the operation was a conversion do _not_ mark a
               resulting constant with TREE_OVERFLOW if the original
               constant was not.  These conversions have implementation
               defined behavior and retaining the TREE_OVERFLOW flag
               here would confuse later passes such as VRP.  */
            if (CONVERT_EXPR_CODE_P (subcode)
                && TREE_CODE (result) == INTEGER_CST
                && TREE_CODE (rhs) == INTEGER_CST)
              TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

            STRIP_USELESS_TYPE_CONVERSION (result);
            if (valid_gimple_rhs_p (result))
              return result;
          }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
         X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
          || gimple_assign_rhs_code (stmt) == NE_EXPR)
        {
          tree lhs = gimple_assign_lhs (stmt);
          tree op1 = gimple_assign_rhs1 (stmt);
          tree op2 = gimple_assign_rhs2 (stmt);
          tree type = TREE_TYPE (op1);

          /* Check whether the comparison operands are of the same boolean
             type as the result type.
             Check that the second operand is an integer constant with value
             one or zero.  */
          if (TREE_CODE (op2) == INTEGER_CST
              && (integer_zerop (op2) || integer_onep (op2))
              && useless_type_conversion_p (TREE_TYPE (lhs), type))
            {
              enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
              bool is_logical_not = false;

              /* X == 0 and X != 1 is a logical-not of X;
                 X == 1 and X != 0 is X itself.  */
              if ((cmp_code == EQ_EXPR && integer_zerop (op2))
                  || (cmp_code == NE_EXPR && integer_onep (op2)))
                is_logical_not = true;

              if (is_logical_not == false)
                result = op1;
              /* Only for X of one-bit precision is the transformation
                 !X -> ~X valid.  */
              else if (TYPE_PRECISION (type) == 1)
                result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
                                     type, op1);
              /* Otherwise we use !X -> X ^ 1.  */
              else
                result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
                                     type, op1, build_int_cst (type, 1));
            }
        }

      if (!result)
        result = fold_binary_loc (loc, subcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
        {
          tree op0 = gimple_assign_rhs1 (stmt);
          tree tem;
          bool set = false;
          location_t cond_loc = gimple_location (stmt);

          if (COMPARISON_CLASS_P (op0))
            {
              fold_defer_overflow_warnings ();
              tem = fold_binary_loc (cond_loc,
                                     TREE_CODE (op0), TREE_TYPE (op0),
                                     TREE_OPERAND (op0, 0),
                                     TREE_OPERAND (op0, 1));
              /* This is actually a conditional expression, not a GIMPLE
                 conditional statement, however, the valid_gimple_rhs_p
                 test still applies.  */
              set = (tem && is_gimple_condexpr (tem)
                     && valid_gimple_rhs_p (tem));
              fold_undefer_overflow_warnings (set, stmt, 0);
            }
          else if (is_gimple_min_invariant (op0))
            {
              tem = op0;
              set = true;
            }
          else
            return NULL_TREE;

          if (set)
            result = fold_build3_loc (cond_loc, COND_EXPR,
                                      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
                                      gimple_assign_rhs2 (stmt),
                                      gimple_assign_rhs3 (stmt));
        }

      if (!result)
        result = fold_ternary_loc (loc, subcode,
                                   TREE_TYPE (gimple_assign_lhs (stmt)),
                                   gimple_assign_rhs1 (stmt),
                                   gimple_assign_rhs2 (stmt),
                                   gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}

/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
                                 gimple_cond_code (stmt),
                                 boolean_type_node,
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */
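
/* Expository example (added for illustration): if gimple_fold_builtin
   rewrites the call "strcpy (dst, src)" into the tree
   "memcpy (dst, src, 4)" because the source length is known, this
   function gimplifies that tree in place of the original GIMPLE_CALL and
   threads the call's VUSE/VDEF through the new sequence.  */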

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;
  struct gimplify_ctx gctx;
  gimple laststore;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (&gctx);
  gctx.into_ssa = gimple_in_ssa_p (cfun);

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          pop_gimplify_context (NULL);
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_remove (si_p, true);
          return;
        }
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
                                       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  laststore = NULL;
  for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
    {
      new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
           && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          || (is_gimple_call (new_stmt)
              && (gimple_call_flags (new_stmt)
                  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
        {
          tree vdef;
          if (!laststore)
            vdef = gimple_vdef (stmt);
          else
            vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
          gimple_set_vdef (new_stmt, vdef);
          if (vdef && TREE_CODE (vdef) == SSA_NAME)
            SSA_NAME_DEF_STMT (vdef) = new_stmt;
          laststore = new_stmt;
        }
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  reaching_vuse = gimple_vuse (stmt);
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
         name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
        reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
          && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}

/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
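
/* Expository example (not in the original sources): for

     s_1 = PHI <"foo", "ab">

   TYPE == 0 fails because the two string lengths 3 and 2 differ, while
   TYPE == 1 succeeds and stores the maximum length 3 in *LENGTH.  */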

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
          && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
        /* The RHS of the statement defining VAR must either have a
           constant length or come from another SSA_NAME with a constant
           length.  */
        if (gimple_assign_single_p (def_stmt)
            || gimple_assign_unary_nop_p (def_stmt))
          {
            tree rhs = gimple_assign_rhs1 (def_stmt);
            return get_maxval_strlen (rhs, length, visited, type);
          }
        else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
          {
            tree op2 = gimple_assign_rhs2 (def_stmt);
            tree op3 = gimple_assign_rhs3 (def_stmt);
            return get_maxval_strlen (op2, length, visited, type)
                   && get_maxval_strlen (op3, length, visited, type);
          }
        return false;

      case GIMPLE_PHI:
        {
          /* All the arguments of the PHI node must have the same constant
             length.  */
          unsigned i;

          for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
            {
              tree arg = gimple_phi_arg (def_stmt, i)->def;

              /* If this PHI has itself as an argument, we cannot
                 determine the string length of this argument.  However,
                 if we can find a constant string length for the other
                 PHI args then we can still be sure that this is a
                 constant string length.  So be optimistic and just
                 continue with the next argument.  */
              if (arg == gimple_phi_result (def_stmt))
                continue;

              if (!get_maxval_strlen (arg, length, visited, type))
                return false;
            }
        }
        return true;

      default:
        return false;
    }
}

/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */
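
/* Expository example (added for illustration): when the length of SRC is
   known to be 3, the statement "strcpy (dst, src)" may be simplified by
   the machinery below into "memcpy (dst, src, 4)", copying the
   terminating NUL byte as well.  */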

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
        {
          tree new_val =
              fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we cannot use the result.  */
          if (is_gimple_val (new_val)
              || (CONVERT_EXPR_P (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))
            return new_val;
        }
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (loc, callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
                                      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (loc, callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),
                                       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_memory_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
                                          val[2], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_stxcpy_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          val[1], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
                                           val[2], ignore,
                                           DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}

/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors).  */
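
/* Expository example (not in the original sources): for C++ code like

     struct A { virtual int f (); };   // no base classes
     A a;
     int i = a.f ();                   // OBJ_TYPE_REF on &a

   passing the ADDR_EXPR &a here yields TYPE_BINFO for A, which lets the
   caller resolve the virtual call directly to A::f.  */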

tree
gimple_extract_devirt_binfo_from_cst (tree cst)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, expected_type, binfo;
  bool last_artificial = false;

  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  expected_type = TREE_TYPE (cst);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
        break;
      if (offset < 0)
        return NULL_TREE;

      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
        {
          if (TREE_CODE (fld) != FIELD_DECL)
            continue;

          pos = int_bit_position (fld);
          size = tree_low_cst (DECL_SIZE (fld), 1);
          if (pos <= offset && (pos + size) > offset)
            break;
        }
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
        return NULL_TREE;

      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      offset -= pos;
    }
  /* Artificial sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;
  else
    return binfo;
}

/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
        tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
        if (tmp)
          {
            gimple_call_set_arg (stmt, i, tmp);
            changed = true;
          }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
        {
          gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
          changed = true;
        }
      else
        {
          tree obj = OBJ_TYPE_REF_OBJECT (callee);
          tree binfo = gimple_extract_devirt_binfo_from_cst (obj);
          if (binfo)
            {
              HOST_WIDE_INT token
                = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
              tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
              if (fndecl)
                {
                  gimple_call_set_fndecl (stmt, fndecl);
                  changed = true;
                }
            }
        }
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  callee = gimple_call_fndecl (stmt);
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);
      if (result)
        {
          if (!update_call_from_tree (gsi, result))
            gimplify_and_update_call_from_tree (gsi, result);
          changed = true;
        }
    }

  return changed;
}

/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;
  gimple_stmt_iterator gsinext = *gsi;
  gimple next_stmt;

  gsi_next (&gsinext);
  next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops = gimple_num_ops (stmt);
        enum tree_code subcode = gimple_assign_rhs_code (stmt);
        tree lhs = gimple_assign_lhs (stmt);
        tree new_rhs;
        /* First canonicalize operand order.  This avoids building new
           trees if this is the only thing fold would later do.  */
        if ((commutative_tree_code (subcode)
             || commutative_ternary_tree_code (subcode))
            && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                     gimple_assign_rhs2 (stmt), false))
          {
            tree tem = gimple_assign_rhs1 (stmt);
            gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
            gimple_assign_set_rhs2 (stmt, tem);
            changed = true;
          }
        new_rhs = fold_gimple_assign (gsi);
        if (new_rhs
            && !useless_type_conversion_p (TREE_TYPE (lhs),
                                           TREE_TYPE (new_rhs)))
          new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
        if (new_rhs
            && (!inplace
                || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
        size_t noutputs;
        const char **oconstraints;
        const char *constraint;
        bool allows_mem, allows_reg;

        noutputs = gimple_asm_noutputs (stmt);
        oconstraints = XALLOCAVEC (const char *, noutputs);

        for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
          {
            tree link = gimple_asm_output_op (stmt, i);
            tree op = TREE_VALUE (link);
            oconstraints[i]
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, true)) != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
        for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
          {
            tree link = gimple_asm_input_op (stmt, i);
            tree op = TREE_VALUE (link);
            constraint
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                                    oconstraints, &allows_mem, &allows_reg);
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
                   != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
        {
          tree val = gimple_debug_bind_get_value (stmt);
          if (val
              && REFERENCE_CLASS_P (val))
            {
              tree tem = maybe_fold_reference (val, false);
              if (tem)
                {
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
          else if (val
                   && TREE_CODE (val) == ADDR_EXPR)
            {
              tree ref = TREE_OPERAND (val, 0);
              tree tem = maybe_fold_reference (ref, false);
              if (tem)
                {
                  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
        }
      break;

    default:;
    }

  /* If stmt folds into nothing and it was the last stmt in a bb,
     don't call gsi_stmt.  */
  if (gsi_end_p (*gsi))
    {
      gcc_assert (next_stmt == NULL);
      return changed;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  Don't do this if stmt folded into nothing,
     as we'd be changing the next stmt.  */
  if (gimple_has_lhs (stmt) && stmt != next_stmt)
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
        {
          tree new_lhs = maybe_fold_reference (lhs, true);
          if (new_lhs)
            {
              gimple_set_lhs (stmt, new_lhs);
              changed = true;
            }
        }
    }

  return changed;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}

/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */
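
/* Expository examples (added for illustration): with INVERT set, a
   comparison "a < b" becomes "a >= b" and an SSA name N of boolean type
   becomes "N == 0"; without INVERT, a boolean-typed value is returned
   unchanged and other SSA names are compared against zero with NE_EXPR.  */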

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  else if (invert)
    {
      if (integer_nonzerop (expr))
        return boolean_false_node;
      else if (integer_zerop (expr))
        return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (EQ_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
        return expr;
      if (integer_nonzerop (expr))
        return boolean_true_node;
      else if (integer_zerop (expr))
        return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (NE_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (TREE_CODE (expr),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
}

/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
                        const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
        return ((code == NE_EXPR && integer_zerop (op2))
                || (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
          && gimple_assign_rhs_code (s) == code
          && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
          && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
        return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
        {
          enum tree_code c = gimple_assign_rhs_code (s);
          if ((c == NE_EXPR && integer_zerop (op2))
              || (c == EQ_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr, c,
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
          if ((c == EQ_EXPR && integer_zerop (op2))
              || (c == NE_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr,
                                           invert_tree_comparison (c, false),
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
        }
    }
  return false;
}

/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
                                 TREE_OPERAND (op2, 0),
                                 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
                                 TREE_OPERAND (op1, 0),
                                 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}

/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
                        enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
                          enum tree_code code2, tree op2a, tree op2b);

/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
                                  invert_tree_comparison (code2, false),
                                  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}

/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_false_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt),
                                  code2,
                                  op2a,
                                  op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
         inner1 AND (inner1 OR inner2) => inner1
         !inner1 AND (inner1 AND inner2) => false
         !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
        return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
         Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (t AND inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner2;
              else if (integer_zerop (t))
                return boolean_false_node;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner2 AND (op2a code2 op2b)))  */
          else if (integer_onep (t))
            return boolean_true_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 AND (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (inner1 AND t)  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner1;
              else if (integer_zerop (t))
                return boolean_false_node;
              /* If both are the same, we can apply the identity
                 (x AND x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner1 AND (op2a code2 op2b)))
             => (t OR partial)  */
          else
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed OR expression.  The
                     interesting case is when at least one is false.
                     Or, if both are the same, we can apply the identity
                     (x OR x) == x.  */
                  if (integer_zerop (partial))
                    return t;
                  else if (integer_zerop (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}

/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */
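
/* Expository example (not in the original sources):
   and_comparisons_1 (LT_EXPR, x, 3, LT_EXPR, x, 5) returns the more
   restrictive test "x < 3", while a disjoint pair such as
   (x < 3) AND (x > 5) folds to boolean_false_node.  */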

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ANDIF_EXPR, code1, code2,
                                    boolean_type_node, op1a, op1b);
      if (t)
        return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ANDIF_EXPR, code1,
                                    swap_tree_comparison (code2),
                                    boolean_type_node, op1a, op1b);
      if (t)
        return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
         return that or FALSE, depending on whether the constant op1b
         also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
        {
          bool done = true;
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return fold_build2 (code1, boolean_type_node, op1a, op1b);
              else
                return boolean_false_node;
            }
        }
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
              else
                return boolean_false_node;
            }
        }

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp != 0); break;
            case NE_EXPR: val = (cmp == 0); break;
            case LT_EXPR: val = (cmp >= 0); break;
            case GT_EXPR: val = (cmp <= 0); break;
            case LE_EXPR: val = (cmp > 0); break;
            case GE_EXPR: val = (cmp < 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      else if (code2 == NE_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp <= 0); break;
            case GT_EXPR: val = (cmp >= 0); break;
            case LE_EXPR: val = (cmp < 0); break;
            case GE_EXPR: val = (cmp > 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
          else
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
          else
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }

      /* Check for singleton ranges.  */
      else if (cmp == 0
               && ((code1 == LE_EXPR && code2 == GE_EXPR)
                   || (code1 == GE_EXPR && code2 == LE_EXPR)))
        return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_false_node;
      else if (cmp >= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ANDed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     we're still OK.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
                        {
                          if (!result)
                            result = boolean_false_node;
                          else if (!integer_zerop (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME
                           && !SSA_NAME_IS_DEFAULT_DEF (arg))
                    {
                      tree temp;
                      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
                      /* In simple cases we can look through PHI nodes,
                         but we have to be careful with loops.
                         See PR49073.  */
                      if (! dom_info_available_p (CDI_DOMINATORS)
                          || gimple_bb (def_stmt) == gimple_bb (stmt)
                          || dominated_by_p (CDI_DOMINATORS,
                                             gimple_bb (def_stmt),
                                             gimple_bb (stmt)))
                        return NULL_TREE;
                      temp = and_var_with_comparison (arg, invert, code2,
                                                      op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }

        default:
          break;
        }
    }
  return NULL_TREE;
}

1934 /* Try to simplify the AND of two comparisons, specified by
1935 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1936 If this can be simplified to a single expression (without
1937 introducing more SSA variables to hold intermediate values),
1938 return the resulting tree. Otherwise return NULL_TREE.
1939 If the result expression is non-null, it has boolean type. */
1941 tree
1942 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1943 enum tree_code code2, tree op2a, tree op2b)
1945 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1946 if (t)
1947 return t;
1948 else
1949 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
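   /* A minimal usage sketch (illustrative only; `five' and `three'
      stand for INTEGER_CST trees built elsewhere):

	tree t = maybe_fold_and_comparisons (GT_EXPR, x, five,
					     GT_EXPR, x, three);

      The more restrictive comparison survives, so T is the boolean
      tree for x > 5.  */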
1952 /* Helper function for or_comparisons_1: try to simplify the OR of the
1953 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1954 If INVERT is true, invert the value of VAR before doing the OR.
1955 Return NULL_TREE if we can't simplify this to a single expression. */
1957 static tree
1958 or_var_with_comparison (tree var, bool invert,
1959 enum tree_code code2, tree op2a, tree op2b)
1961 tree t;
1962 gimple stmt = SSA_NAME_DEF_STMT (var);
1964 /* We can only deal with variables whose definitions are assignments. */
1965 if (!is_gimple_assign (stmt))
1966 return NULL_TREE;
1968 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1969 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1970 Then we only have to consider the simpler non-inverted cases. */
1971 if (invert)
1972 t = and_var_with_comparison_1 (stmt,
1973 invert_tree_comparison (code2, false),
1974 op2a, op2b);
1975 else
1976 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
1977 return canonicalize_bool (t, invert);
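   /* For illustration: with INVERT set, simplifying !v | (x >= y)
      proceeds by simplifying v & (x < y) (NaNs are ignored here, as
      invert_tree_comparison is called with honor_nans false) and then
      negating the result via canonicalize_bool.  */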
1980 /* Try to simplify the OR of the ssa variable defined by the assignment
1981 STMT with the comparison specified by (OP2A CODE2 OP2B).
1982 Return NULL_TREE if we can't simplify this to a single expression. */
1984 static tree
1985 or_var_with_comparison_1 (gimple stmt,
1986 enum tree_code code2, tree op2a, tree op2b)
1988 tree var = gimple_assign_lhs (stmt);
1989 tree true_test_var = NULL_TREE;
1990 tree false_test_var = NULL_TREE;
1991 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1993 /* Check for identities like (var OR (var != 0)) => var, (var OR (var == 0)) => true. */
1994 if (TREE_CODE (op2a) == SSA_NAME
1995 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1997 if ((code2 == NE_EXPR && integer_zerop (op2b))
1998 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2000 true_test_var = op2a;
2001 if (var == true_test_var)
2002 return var;
2004 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2005 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2007 false_test_var = op2a;
2008 if (var == false_test_var)
2009 return boolean_true_node;
2013 /* If the definition is a comparison, recurse on it. */
2014 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2016 tree t = or_comparisons_1 (innercode,
2017 gimple_assign_rhs1 (stmt),
2018 gimple_assign_rhs2 (stmt),
2019 code2,
2020 op2a,
2021 op2b);
2022 if (t)
2023 return t;
2026 /* If the definition is an AND or OR expression, we may be able to
2027 simplify by reassociating. */
2028 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2029 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2031 tree inner1 = gimple_assign_rhs1 (stmt);
2032 tree inner2 = gimple_assign_rhs2 (stmt);
2033 gimple s;
2034 tree t;
2035 tree partial = NULL_TREE;
2036 bool is_or = (innercode == BIT_IOR_EXPR);
2038 /* Check for boolean identities that don't require recursive examination
2039 of inner1/inner2:
2040 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2041 inner1 OR (inner1 AND inner2) => inner1
2042 !inner1 OR (inner1 OR inner2) => true
2043 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2 */
2045 if (inner1 == true_test_var)
2046 return (is_or ? var : inner1);
2047 else if (inner2 == true_test_var)
2048 return (is_or ? var : inner2);
2049 else if (inner1 == false_test_var)
2050 return (is_or
2051 ? boolean_true_node
2052 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2053 else if (inner2 == false_test_var)
2054 return (is_or
2055 ? boolean_true_node
2056 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2058 /* Next, redistribute/reassociate the OR across the inner tests.
2059 Compute the first partial result, (inner1 OR (op2a code2 op2b)). */
2060 if (TREE_CODE (inner1) == SSA_NAME
2061 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2062 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2063 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2064 gimple_assign_rhs1 (s),
2065 gimple_assign_rhs2 (s),
2066 code2, op2a, op2b)))
2068 /* Handle the OR case, where we are reassociating:
2069 (inner1 OR inner2) OR (op2a code2 op2b)
2070 => (t OR inner2)
2071 If the partial result t is a constant, we win. Otherwise
2072 continue on to try reassociating with the other inner test. */
2073 if (is_or)
2075 if (integer_onep (t))
2076 return boolean_true_node;
2077 else if (integer_zerop (t))
2078 return inner2;
2081 /* Handle the AND case, where we are redistributing:
2082 (inner1 AND inner2) OR (op2a code2 op2b)
2083 => (t AND (inner2 OR (op2a code2 op2b))) */
2084 else if (integer_zerop (t))
2085 return boolean_false_node;
2087 /* Save partial result for later. */
2088 partial = t;
2091 /* Compute the second partial result, (inner2 OR (op2a code2 op2b)). */
2092 if (TREE_CODE (inner2) == SSA_NAME
2093 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2094 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2095 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2096 gimple_assign_rhs1 (s),
2097 gimple_assign_rhs2 (s),
2098 code2, op2a, op2b)))
2100 /* Handle the OR case, where we are reassociating:
2101 (inner1 OR inner2) OR (op2a code2 op2b)
2102 => (inner1 OR t)
2103 => (t OR partial) */
2104 if (is_or)
2106 if (integer_zerop (t))
2107 return inner1;
2108 else if (integer_onep (t))
2109 return boolean_true_node;
2110 /* If both are the same, we can apply the identity
2111 (x OR x) == x. */
2112 else if (partial && same_bool_result_p (t, partial))
2113 return t;
2116 /* Handle the AND case, where we are redistributing:
2117 (inner1 AND inner2) OR (op2a code2 op2b)
2118 => (t AND (inner1 OR (op2a code2 op2b)))
2119 => (t AND partial) */
2120 else
2122 if (integer_zerop (t))
2123 return boolean_false_node;
2124 else if (partial)
2126 /* We already got a simplification for the other
2127 operand to the redistributed AND expression. The
2128 interesting case is when at least one is true.
2129 Or, if both are the same, we can apply the identity
2130 (x AND x) == x. */
2131 if (integer_onep (partial))
2132 return t;
2133 else if (integer_onep (t))
2134 return partial;
2135 else if (same_bool_result_p (t, partial))
2136 return t;
2141 return NULL_TREE;
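   /* For illustration, the identities above fold, for boolean a and b:

	v = a & b;   v | (a != 0)   becomes a
	v = a | b;   v | (a != 0)   becomes v  (i.e. a | b)  */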
2144 /* Try to simplify the OR of two comparisons defined by
2145 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2146 If this can be done without constructing an intermediate value,
2147 return the resulting tree; otherwise NULL_TREE is returned.
2148 This function is deliberately asymmetric as it recurses on SSA_DEFs
2149 in the first comparison but not the second. */
2151 static tree
2152 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2153 enum tree_code code2, tree op2a, tree op2b)
2155 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2156 if (operand_equal_p (op1a, op2a, 0)
2157 && operand_equal_p (op1b, op2b, 0))
2159 /* Result will be either NULL_TREE, or a combined comparison. */
2160 tree t = combine_comparisons (UNKNOWN_LOCATION,
2161 TRUTH_ORIF_EXPR, code1, code2,
2162 boolean_type_node, op1a, op1b);
2163 if (t)
2164 return t;
2167 /* Likewise the swapped case of the above. */
2168 if (operand_equal_p (op1a, op2b, 0)
2169 && operand_equal_p (op1b, op2a, 0))
2171 /* Result will be either NULL_TREE, or a combined comparison. */
2172 tree t = combine_comparisons (UNKNOWN_LOCATION,
2173 TRUTH_ORIF_EXPR, code1,
2174 swap_tree_comparison (code2),
2175 boolean_type_node, op1a, op1b);
2176 if (t)
2177 return t;
2180 /* If both comparisons are of the same value against constants, we might
2181 be able to merge them. */
2182 if (operand_equal_p (op1a, op2a, 0)
2183 && TREE_CODE (op1b) == INTEGER_CST
2184 && TREE_CODE (op2b) == INTEGER_CST)
2186 int cmp = tree_int_cst_compare (op1b, op2b);
2188 /* If we have (op1a != op1b), we should either be able to
2189 return that or TRUE, depending on whether the constant op1b
2190 also satisfies the other comparison against op2b. */
2191 if (code1 == NE_EXPR)
2193 bool done = true;
2194 bool val;
2195 switch (code2)
2197 case EQ_EXPR: val = (cmp == 0); break;
2198 case NE_EXPR: val = (cmp != 0); break;
2199 case LT_EXPR: val = (cmp < 0); break;
2200 case GT_EXPR: val = (cmp > 0); break;
2201 case LE_EXPR: val = (cmp <= 0); break;
2202 case GE_EXPR: val = (cmp >= 0); break;
2203 default: done = false;
2205 if (done)
2207 if (val)
2208 return boolean_true_node;
2209 else
2210 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2213 /* Likewise if the second comparison is a != comparison. */
2214 else if (code2 == NE_EXPR)
2216 bool done = true;
2217 bool val;
2218 switch (code1)
2220 case EQ_EXPR: val = (cmp == 0); break;
2221 case NE_EXPR: val = (cmp != 0); break;
2222 case LT_EXPR: val = (cmp > 0); break;
2223 case GT_EXPR: val = (cmp < 0); break;
2224 case LE_EXPR: val = (cmp >= 0); break;
2225 case GE_EXPR: val = (cmp <= 0); break;
2226 default: done = false;
2228 if (done)
2230 if (val)
2231 return boolean_true_node;
2232 else
2233 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2237 /* See if an equality test is redundant with the other comparison. */
2238 else if (code1 == EQ_EXPR)
2240 bool val;
2241 switch (code2)
2243 case EQ_EXPR: val = (cmp == 0); break;
2244 case NE_EXPR: val = (cmp != 0); break;
2245 case LT_EXPR: val = (cmp < 0); break;
2246 case GT_EXPR: val = (cmp > 0); break;
2247 case LE_EXPR: val = (cmp <= 0); break;
2248 case GE_EXPR: val = (cmp >= 0); break;
2249 default:
2250 val = false;
2252 if (val)
2253 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2255 else if (code2 == EQ_EXPR)
2257 bool val;
2258 switch (code1)
2260 case EQ_EXPR: val = (cmp == 0); break;
2261 case NE_EXPR: val = (cmp != 0); break;
2262 case LT_EXPR: val = (cmp > 0); break;
2263 case GT_EXPR: val = (cmp < 0); break;
2264 case LE_EXPR: val = (cmp >= 0); break;
2265 case GE_EXPR: val = (cmp <= 0); break;
2266 default:
2267 val = false;
2269 if (val)
2270 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2273 /* Choose the less restrictive of two < or <= comparisons. */
2274 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2275 && (code2 == LT_EXPR || code2 == LE_EXPR))
2277 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2278 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2279 else
2280 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2283 /* Likewise choose the less restrictive of two > or >= comparisons. */
2284 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2285 && (code2 == GT_EXPR || code2 == GE_EXPR))
2287 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2288 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2289 else
2290 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2293 /* Check for singleton ranges. */
2294 else if (cmp == 0
2295 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2296 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2297 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2299 /* Check for less/greater pairs that don't restrict the range at all. */
2300 else if (cmp >= 0
2301 && (code1 == LT_EXPR || code1 == LE_EXPR)
2302 && (code2 == GT_EXPR || code2 == GE_EXPR))
2303 return boolean_true_node;
2304 else if (cmp <= 0
2305 && (code1 == GT_EXPR || code1 == GE_EXPR)
2306 && (code2 == LT_EXPR || code2 == LE_EXPR))
2307 return boolean_true_node;
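          /* For illustration, the checks above fold, e.g.:
               (x < 5) || (x > 5)   to x != 5, and
               (x <= 5) || (x > 3)  to true.  */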
2310 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2311 NAME's definition is a truth value. See if there are any simplifications
2312 that can be done against NAME's definition. */
2313 if (TREE_CODE (op1a) == SSA_NAME
2314 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2315 && (integer_zerop (op1b) || integer_onep (op1b)))
2317 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2318 || (code1 == NE_EXPR && integer_onep (op1b)));
2319 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2320 switch (gimple_code (stmt))
2322 case GIMPLE_ASSIGN:
2323 /* Try to simplify by copy-propagating the definition. */
2324 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2326 case GIMPLE_PHI:
2327 /* If every argument to the PHI produces the same result when
2328 ORed with the second comparison, we win.
2329 Do not do this unless the type is bool since we need a bool
2330 result here anyway. */
2331 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2333 tree result = NULL_TREE;
2334 unsigned i;
2335 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2337 tree arg = gimple_phi_arg_def (stmt, i);
2339 /* If this PHI has itself as an argument, ignore it.
2340 If all the other args produce the same result,
2341 we're still OK. */
2342 if (arg == gimple_phi_result (stmt))
2343 continue;
2344 else if (TREE_CODE (arg) == INTEGER_CST)
2346 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2348 if (!result)
2349 result = boolean_true_node;
2350 else if (!integer_onep (result))
2351 return NULL_TREE;
2353 else if (!result)
2354 result = fold_build2 (code2, boolean_type_node,
2355 op2a, op2b);
2356 else if (!same_bool_comparison_p (result,
2357 code2, op2a, op2b))
2358 return NULL_TREE;
2360 else if (TREE_CODE (arg) == SSA_NAME
2361 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2363 tree temp;
2364 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2365 /* In simple cases we can look through PHI nodes,
2366 but we have to be careful with loops.
2367 See PR49073. */
2368 if (! dom_info_available_p (CDI_DOMINATORS)
2369 || gimple_bb (def_stmt) == gimple_bb (stmt)
2370 || dominated_by_p (CDI_DOMINATORS,
2371 gimple_bb (def_stmt),
2372 gimple_bb (stmt)))
2373 return NULL_TREE;
2374 temp = or_var_with_comparison (arg, invert, code2,
2375 op2a, op2b);
2376 if (!temp)
2377 return NULL_TREE;
2378 else if (!result)
2379 result = temp;
2380 else if (!same_bool_result_p (result, temp))
2381 return NULL_TREE;
2383 else
2384 return NULL_TREE;
2386 return result;
2389 default:
2390 break;
2393 return NULL_TREE;
2396 /* Try to simplify the OR of two comparisons, specified by
2397 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2398 If this can be simplified to a single expression (without
2399 introducing more SSA variables to hold intermediate values),
2400 return the resulting tree. Otherwise return NULL_TREE.
2401 If the result expression is non-null, it has boolean type. */
2403 tree
2404 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2405 enum tree_code code2, tree op2a, tree op2b)
2407 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2408 if (t)
2409 return t;
2410 else
2411 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
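   /* A sketch of why the symmetric retry matters (illustrative only;
      b_1 is a hypothetical boolean SSA name defined by b_1 = x < y,
      and zero is integer_zero_node):

	maybe_fold_or_comparisons (LT_EXPR, x, y, NE_EXPR, b_1, zero);

      The first or_comparisons_1 call fails because that function only
      recurses on the first comparison's SSA definition; the swapped
      call looks through b_1 and folds the whole expression to x < y.  */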
2415 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2417 Either NULL_TREE, a simplified but non-constant expression,
2418 or a constant is returned.
2420 ??? This should go into a gimple-fold-inline.h file to be eventually
2421 privatized with the single valueize function used in the various TUs
2422 to avoid the indirect function call overhead. */
2424 tree
2425 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2427 location_t loc = gimple_location (stmt);
2428 switch (gimple_code (stmt))
2430 case GIMPLE_ASSIGN:
2432 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2434 switch (get_gimple_rhs_class (subcode))
2436 case GIMPLE_SINGLE_RHS:
2438 tree rhs = gimple_assign_rhs1 (stmt);
2439 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2441 if (TREE_CODE (rhs) == SSA_NAME)
2443 /* If the RHS is an SSA_NAME, return its known constant value,
2444 if any. */
2445 return (*valueize) (rhs);
2447 /* Handle propagating invariant addresses into address
2448 operations. */
2449 else if (TREE_CODE (rhs) == ADDR_EXPR
2450 && !is_gimple_min_invariant (rhs))
2452 HOST_WIDE_INT offset = 0;
2453 tree base;
2454 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2455 &offset,
2456 valueize);
2457 if (base
2458 && (CONSTANT_CLASS_P (base)
2459 || decl_address_invariant_p (base)))
2460 return build_invariant_address (TREE_TYPE (rhs),
2461 base, offset);
2463 else if (TREE_CODE (rhs) == CONSTRUCTOR
2464 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2465 && (CONSTRUCTOR_NELTS (rhs)
2466 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2468 unsigned i;
2469 tree val, *vec;
2471 vec = XALLOCAVEC (tree,
2472 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
2473 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2475 val = (*valueize) (val);
2476 if (TREE_CODE (val) == INTEGER_CST
2477 || TREE_CODE (val) == REAL_CST
2478 || TREE_CODE (val) == FIXED_CST)
2479 vec[i] = val;
2480 else
2481 return NULL_TREE;
2484 return build_vector (TREE_TYPE (rhs), vec);
2487 if (kind == tcc_reference)
2489 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2490 || TREE_CODE (rhs) == REALPART_EXPR
2491 || TREE_CODE (rhs) == IMAGPART_EXPR)
2492 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2494 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2495 return fold_unary_loc (EXPR_LOCATION (rhs),
2496 TREE_CODE (rhs),
2497 TREE_TYPE (rhs), val);
2499 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2500 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2502 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2503 return fold_ternary_loc (EXPR_LOCATION (rhs),
2504 TREE_CODE (rhs),
2505 TREE_TYPE (rhs), val,
2506 TREE_OPERAND (rhs, 1),
2507 TREE_OPERAND (rhs, 2));
2509 else if (TREE_CODE (rhs) == MEM_REF
2510 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2512 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2513 if (TREE_CODE (val) == ADDR_EXPR
2514 && is_gimple_min_invariant (val))
2516 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2517 unshare_expr (val),
2518 TREE_OPERAND (rhs, 1));
2519 if (tem)
2520 rhs = tem;
2523 return fold_const_aggregate_ref_1 (rhs, valueize);
2525 else if (kind == tcc_declaration)
2526 return get_symbol_constant_value (rhs);
2527 return rhs;
2530 case GIMPLE_UNARY_RHS:
2532 /* Handle unary operators that can appear in GIMPLE form.
2533 Note that we know the single operand must be a constant,
2534 so this should almost always return a simplified RHS. */
2535 tree lhs = gimple_assign_lhs (stmt);
2536 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2538 /* Conversions are useless for CCP purposes if they are
2539 value-preserving. Thus the restrictions that
2540 useless_type_conversion_p places for restrict qualification
2541 of pointer types should not apply here.
2542 Substitution later will only substitute to allowed places. */
2543 if (CONVERT_EXPR_CODE_P (subcode)
2544 && POINTER_TYPE_P (TREE_TYPE (lhs))
2545 && POINTER_TYPE_P (TREE_TYPE (op0))
2546 && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2547 == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2548 && TYPE_MODE (TREE_TYPE (lhs))
2549 == TYPE_MODE (TREE_TYPE (op0)))
2550 return op0;
2552 return
2553 fold_unary_ignore_overflow_loc (loc, subcode,
2554 gimple_expr_type (stmt), op0);
2557 case GIMPLE_BINARY_RHS:
2559 /* Handle binary operators that can appear in GIMPLE form. */
2560 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2561 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2563 /* Translate &x + CST into an invariant form suitable for
2564 further propagation. */
2565 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2566 && TREE_CODE (op0) == ADDR_EXPR
2567 && TREE_CODE (op1) == INTEGER_CST)
2569 tree off = fold_convert (ptr_type_node, op1);
2570 return build_fold_addr_expr_loc
2571 (loc,
2572 fold_build2 (MEM_REF,
2573 TREE_TYPE (TREE_TYPE (op0)),
2574 unshare_expr (op0), off));
2577 return fold_binary_loc (loc, subcode,
2578 gimple_expr_type (stmt), op0, op1);
2581 case GIMPLE_TERNARY_RHS:
2583 /* Handle ternary operators that can appear in GIMPLE form. */
2584 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2585 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2586 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2588 /* Fold embedded expressions in ternary codes. */
2589 if ((subcode == COND_EXPR
2590 || subcode == VEC_COND_EXPR)
2591 && COMPARISON_CLASS_P (op0))
2593 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2594 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2595 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2596 TREE_TYPE (op0), op00, op01);
2597 if (tem)
2598 op0 = tem;
2601 return fold_ternary_loc (loc, subcode,
2602 gimple_expr_type (stmt), op0, op1, op2);
2605 default:
2606 gcc_unreachable ();
2610 case GIMPLE_CALL:
2612 tree fn;
2614 if (gimple_call_internal_p (stmt))
2615 /* No folding yet for these functions. */
2616 return NULL_TREE;
2618 fn = (*valueize) (gimple_call_fn (stmt));
2619 if (TREE_CODE (fn) == ADDR_EXPR
2620 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2621 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2623 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2624 tree call, retval;
2625 unsigned i;
2626 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2627 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2628 call = build_call_array_loc (loc,
2629 gimple_call_return_type (stmt),
2630 fn, gimple_call_num_args (stmt), args);
2631 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2632 if (retval)
2633 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2634 STRIP_NOPS (retval);
2635 return retval;
2637 return NULL_TREE;
2640 default:
2641 return NULL_TREE;
2645 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2646 Returns NULL_TREE if folding to a constant is not possible, otherwise
2647 returns a constant according to is_gimple_min_invariant. */
2649 tree
2650 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2652 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2653 if (res && is_gimple_min_invariant (res))
2654 return res;
2655 return NULL_TREE;
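   /* A minimal usage sketch (illustrative only; my_valueize and
      lookup_lattice_constant are hypothetical, following the CCP-style
      contract: return the known constant for an SSA name, else the
      name itself):

	static tree
	my_valueize (tree name)
	{
	  tree cst = lookup_lattice_constant (name);
	  return cst ? cst : name;
	}

	tree val = gimple_fold_stmt_to_constant (stmt, my_valueize);

      For a statement x_2 = x_1 + 3 with the lattice knowing x_1 == 4,
      VAL is the INTEGER_CST 7.  */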
2659 /* The following set of functions are supposed to fold references using
2660 their constant initializers. */
2662 static tree fold_ctor_reference (tree type, tree ctor,
2663 unsigned HOST_WIDE_INT offset,
2664 unsigned HOST_WIDE_INT size, tree);
2666 /* See if we can find a constructor defining the value of BASE.
2667 When we know the constructor with a constant offset (such as when
2668 BASE is array[40] and we know the constructor of array), then
2669 BIT_OFFSET is adjusted accordingly.
2671 As a special case, return error_mark_node when the constructor
2672 is not explicitly available but is known to be zero,
2673 such as for 'static const int a;'. */
2674 static tree
2675 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2676 tree (*valueize)(tree))
2678 HOST_WIDE_INT bit_offset2, size, max_size;
2679 if (TREE_CODE (base) == MEM_REF)
2681 if (!integer_zerop (TREE_OPERAND (base, 1)))
2683 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2684 return NULL_TREE;
2685 *bit_offset += (mem_ref_offset (base).low
2686 * BITS_PER_UNIT);
2689 if (valueize
2690 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2691 base = valueize (TREE_OPERAND (base, 0));
2692 if (!base || TREE_CODE (base) != ADDR_EXPR)
2693 return NULL_TREE;
2694 base = TREE_OPERAND (base, 0);
2697 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2698 DECL_INITIAL. If BASE is a nested reference into another
2699 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2700 the inner reference. */
2701 switch (TREE_CODE (base))
2703 case VAR_DECL:
2704 if (!const_value_known_p (base))
2705 return NULL_TREE;
2707 /* Fallthru. */
2708 case CONST_DECL:
2709 if (!DECL_INITIAL (base)
2710 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2711 return error_mark_node;
2712 /* Do not return an error_mark_node DECL_INITIAL. LTO uses this
2713 as a special marker (_not_ zero ...) for its own purposes. */
2714 if (DECL_INITIAL (base) == error_mark_node)
2715 return NULL_TREE;
2716 return DECL_INITIAL (base);
2718 case ARRAY_REF:
2719 case COMPONENT_REF:
2720 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2721 if (max_size == -1 || size != max_size)
2722 return NULL_TREE;
2723 *bit_offset += bit_offset2;
2724 return get_base_constructor (base, bit_offset, valueize);
2726 case STRING_CST:
2727 case CONSTRUCTOR:
2728 return base;
2730 default:
2731 return NULL_TREE;
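   /* For illustration: for a load from `a' where

	static const int a;

      there is no DECL_INITIAL, but the value is known to be zero, so
      error_mark_node is returned and callers fold the access to 0.  */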
2735 /* CTOR is a STRING_CST. Fold a reference of type TYPE and size SIZE
2736 to the memory at bit OFFSET.
2738 We only do the simple job of folding byte accesses. */
2740 static tree
2741 fold_string_cst_ctor_reference (tree type, tree ctor,
2742 unsigned HOST_WIDE_INT offset,
2743 unsigned HOST_WIDE_INT size)
2745 if (INTEGRAL_TYPE_P (type)
2746 && (TYPE_MODE (type)
2747 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2748 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2749 == MODE_INT)
2750 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2751 && size == BITS_PER_UNIT
2752 && !(offset % BITS_PER_UNIT))
2754 offset /= BITS_PER_UNIT;
2755 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2756 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2757 [offset]));
2758 /* Folding
2759 const char a[20]="hello";
2760 return a[10];
2762 might lead to an offset greater than the string length. In that
2763 case we know the value is either initialized to 0 or out of
2764 bounds; return 0 in both cases. */
2765 return build_zero_cst (type);
2767 return NULL_TREE;
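   /* For illustration: with CTOR the STRING_CST "hello", folding the
      byte access a[1] of

	static const char a[] = "hello";

      (OFFSET 8, SIZE 8) yields the INTEGER_CST for 'e', while a[10]
      folds to 0 as described above.  */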
2770 /* CTOR is a CONSTRUCTOR of an array type. Fold a reference of type TYPE
2771 and size SIZE to the memory at bit OFFSET. */
2773 static tree
2774 fold_array_ctor_reference (tree type, tree ctor,
2775 unsigned HOST_WIDE_INT offset,
2776 unsigned HOST_WIDE_INT size,
2777 tree from_decl)
2779 unsigned HOST_WIDE_INT cnt;
2780 tree cfield, cval;
2781 double_int low_bound, elt_size;
2782 double_int index, max_index;
2783 double_int access_index;
2784 tree domain_type = NULL_TREE, index_type = NULL_TREE;
2785 HOST_WIDE_INT inner_offset;
2787 /* Compute low bound and elt size. */
2788 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2789 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2790 if (domain_type && TYPE_MIN_VALUE (domain_type))
2792 /* Static constructors for variably sized objects make no sense. */
2793 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2794 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
2795 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2797 else
2798 low_bound = double_int_zero;
2799 /* Static constructors for variably sized objects make no sense. */
2800 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2801 == INTEGER_CST);
2802 elt_size =
2803 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2806 /* We can only handle constant-sized accesses that are known not to
2807 be larger than the size of the array element. */
2808 if (!TYPE_SIZE_UNIT (type)
2809 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2810 || double_int_cmp (elt_size,
2811 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
2812 return NULL_TREE;
2814 /* Compute the array index we look for. */
2815 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
2816 elt_size, TRUNC_DIV_EXPR);
2817 access_index = double_int_add (access_index, low_bound);
2818 if (index_type)
2819 access_index = double_int_ext (access_index,
2820 TYPE_PRECISION (index_type),
2821 TYPE_UNSIGNED (index_type));
2823 /* And offset within the access. */
2824 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
2826 /* See if the array field is large enough to span the whole access. We
2827 do not care to fold accesses spanning multiple array indexes. */
2828 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
2829 return NULL_TREE;
2831 index = double_int_sub (low_bound, double_int_one);
2832 if (index_type)
2833 index = double_int_ext (index,
2834 TYPE_PRECISION (index_type),
2835 TYPE_UNSIGNED (index_type));
2837 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2839 /* An array constructor may explicitly set the index, specify a range,
2840 or leave the index NULL, meaning that it is the next index after the
2841 previous one. */
2842 if (cfield)
2844 if (TREE_CODE (cfield) == INTEGER_CST)
2845 max_index = index = tree_to_double_int (cfield);
2846 else
2848 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2849 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2850 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2853 else
2855 index = double_int_add (index, double_int_one);
2856 if (index_type)
2857 index = double_int_ext (index,
2858 TYPE_PRECISION (index_type),
2859 TYPE_UNSIGNED (index_type));
2860 max_index = index;
2863 /* Do we have a match? */
2864 if (double_int_cmp (access_index, index, 1) >= 0
2865 && double_int_cmp (access_index, max_index, 1) <= 0)
2866 return fold_ctor_reference (type, cval, inner_offset, size,
2867 from_decl);
2869 /* When memory is not explicitly mentioned in the constructor,
2870 it is 0 (or out of range). */
2871 return build_zero_cst (type);
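   /* For illustration (assuming a target with 32-bit int): for

	static const int a[4] = { 10, 20, 30 };

      a read of a[1] (OFFSET 32, SIZE 32) folds to 20, and a[3],
      absent from the constructor, folds to 0.  */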
2874 /* CTOR is a CONSTRUCTOR of an aggregate or vector.
2875 Fold a reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2877 static tree
2878 fold_nonarray_ctor_reference (tree type, tree ctor,
2879 unsigned HOST_WIDE_INT offset,
2880 unsigned HOST_WIDE_INT size,
2881 tree from_decl)
2883 unsigned HOST_WIDE_INT cnt;
2884 tree cfield, cval;
2886 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2887 cval)
2889 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2890 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2891 tree field_size = DECL_SIZE (cfield);
2892 double_int bitoffset;
2893 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2894 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
2895 double_int bitoffset_end, access_end;
2897 /* Variable sized objects in static constructors make no sense,
2898 but field_size can be NULL for flexible array members. */
2899 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2900 && TREE_CODE (byte_offset) == INTEGER_CST
2901 && (field_size != NULL_TREE
2902 ? TREE_CODE (field_size) == INTEGER_CST
2903 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2905 /* Compute bit offset of the field. */
2906 bitoffset = double_int_add (tree_to_double_int (field_offset),
2907 double_int_mul (byte_offset_cst,
2908 bits_per_unit_cst));
2909 /* Compute bit offset where the field ends. */
2910 if (field_size != NULL_TREE)
2911 bitoffset_end = double_int_add (bitoffset,
2912 tree_to_double_int (field_size));
2913 else
2914 bitoffset_end = double_int_zero;
2916 access_end = double_int_add (uhwi_to_double_int (offset),
2917 uhwi_to_double_int (size));
2919 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2920 [BITOFFSET, BITOFFSET_END)? */
2921 if (double_int_cmp (access_end, bitoffset, 0) > 0
2922 && (field_size == NULL_TREE
2923 || double_int_cmp (uhwi_to_double_int (offset),
2924 bitoffset_end, 0) < 0))
2926 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
2927 bitoffset);
2928 /* We do have overlap. Now see if the field is large enough to
2929 cover the access. Give up on accesses spanning multiple
2930 fields. */
2931 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
2932 return NULL_TREE;
2933 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
2934 return NULL_TREE;
2935 return fold_ctor_reference (type, cval,
2936 double_int_to_uhwi (inner_offset), size,
2937 from_decl);
2940 /* When memory is not explicitly mentioned in the constructor, it is 0. */
2941 return build_zero_cst (type);
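   /* For illustration: for

	static const struct { int x; int y; } p = { .y = 2 };

      a read of p.y folds to 2, while p.x, not mentioned in the
      constructor, folds to 0.  */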
2944 /* CTOR is a value initializing memory. Fold a reference of type TYPE
2945 and size SIZE to the memory at bit OFFSET. */
2947 static tree
2948 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2949 unsigned HOST_WIDE_INT size, tree from_decl)
2951 tree ret;
2953 /* We found the field with an exact match. */
2954 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2955 && !offset)
2956 return canonicalize_constructor_val (ctor, from_decl);
2958 /* We are at the end of the walk; see if we can view-convert the
2959 result. */
2960 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2961 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2962 && operand_equal_p (TYPE_SIZE (type),
2963 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2965 ret = canonicalize_constructor_val (ctor, from_decl);
2966 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2967 if (ret)
2968 STRIP_NOPS (ret);
2969 return ret;
2971 if (TREE_CODE (ctor) == STRING_CST)
2972 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2973 if (TREE_CODE (ctor) == CONSTRUCTOR)
2976 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2977 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
2978 return fold_array_ctor_reference (type, ctor, offset, size,
2979 from_decl);
2980 else
2981 return fold_nonarray_ctor_reference (type, ctor, offset, size,
2982 from_decl);
2985 return NULL_TREE;
2988 /* Return the tree representing the element referenced by T if T is an
2989 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
2990 names using VALUEIZE. Return NULL_TREE otherwise. */
2992 tree
2993 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
2995 tree ctor, idx, base;
2996 HOST_WIDE_INT offset, size, max_size;
2997 tree tem;
2999 if (TREE_THIS_VOLATILE (t))
3000 return NULL_TREE;
3002 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3003 return get_symbol_constant_value (t);
3005 tem = fold_read_from_constant_string (t);
3006 if (tem)
3007 return tem;
3009 switch (TREE_CODE (t))
3011 case ARRAY_REF:
3012 case ARRAY_RANGE_REF:
3013 /* Constant indexes are handled well by get_base_constructor.
3014 We only special-case variable offsets here.
3015 FIXME: This code can't handle nested references with variable indexes
3016 (they will be handled only by iteration of ccp). Perhaps we can bring
3017 get_ref_base_and_extent here and make it use a valueize callback. */
3018 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3019 && valueize
3020 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3021 && TREE_CODE (idx) == INTEGER_CST)
3023 tree low_bound, unit_size;
3024 double_int doffset;
3026 /* If the resulting bit-offset is constant, track it. */
3027 if ((low_bound = array_ref_low_bound (t),
3028 TREE_CODE (low_bound) == INTEGER_CST)
3029 && (unit_size = array_ref_element_size (t),
3030 host_integerp (unit_size, 1))
3031 && (doffset = double_int_sext
3032 (double_int_sub (TREE_INT_CST (idx),
3033 TREE_INT_CST (low_bound)),
3034 TYPE_PRECISION (TREE_TYPE (idx))),
3035 double_int_fits_in_shwi_p (doffset)))
3037 offset = double_int_to_shwi (doffset);
3038 offset *= TREE_INT_CST_LOW (unit_size);
3039 offset *= BITS_PER_UNIT;
3041 base = TREE_OPERAND (t, 0);
3042 ctor = get_base_constructor (base, &offset, valueize);
3043 /* Empty constructor. Always fold to 0. */
3044 if (ctor == error_mark_node)
3045 return build_zero_cst (TREE_TYPE (t));
3046 /* Out-of-bounds array access. The value is undefined,
3047 but don't fold. */
3048 if (offset < 0)
3049 return NULL_TREE;
3050 /* We cannot determine the ctor. */
3051 if (!ctor)
3052 return NULL_TREE;
3053 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3054 TREE_INT_CST_LOW (unit_size)
3055 * BITS_PER_UNIT,
3056 base);
3059 /* Fallthru. */
3061 case COMPONENT_REF:
3062 case BIT_FIELD_REF:
3063 case TARGET_MEM_REF:
3064 case MEM_REF:
3065 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3066 ctor = get_base_constructor (base, &offset, valueize);
3068 /* Empty constructor. Always fold to 0. */
3069 if (ctor == error_mark_node)
3070 return build_zero_cst (TREE_TYPE (t));
3071 /* We do not know the precise address. */
3072 if (max_size == -1 || max_size != size)
3073 return NULL_TREE;
3074 /* We cannot determine the ctor. */
3075 if (!ctor)
3076 return NULL_TREE;
3078 /* Out-of-bounds array access. The value is undefined, but don't fold. */
3079 if (offset < 0)
3080 return NULL_TREE;
3082 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
3083 base);
3085 case REALPART_EXPR:
3086 case IMAGPART_EXPR:
3088 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3089 if (c && TREE_CODE (c) == COMPLEX_CST)
3090 return fold_build1_loc (EXPR_LOCATION (t),
3091 TREE_CODE (t), TREE_TYPE (t), c);
3092 break;
3095 default:
3096 break;
3099 return NULL_TREE;
3102 tree
3103 fold_const_aggregate_ref (tree t)
3105 return fold_const_aggregate_ref_1 (t, NULL);
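   /* For illustration: for

	static const _Complex double z = 3.0 + 4.0i;

      fold_const_aggregate_ref on __real z reaches the COMPLEX_CST
      through get_symbol_constant_value and returns the REAL_CST 3.0.  */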
3108 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3109 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3110 KNOWN_BINFO carries the binfo describing the true type of
3111 OBJ_TYPE_REF_OBJECT(REF). */
3113 tree
3114 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3116 unsigned HOST_WIDE_INT offset, size;
3117 tree v, fn, vtable;
3119 vtable = v = BINFO_VTABLE (known_binfo);
3120 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
3121 if (!v)
3122 return NULL_TREE;
3124 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3126 offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3127 v = TREE_OPERAND (v, 0);
3129 else
3130 offset = 0;
3132 if (TREE_CODE (v) != ADDR_EXPR)
3133 return NULL_TREE;
3134 v = TREE_OPERAND (v, 0);
3136 if (TREE_CODE (v) != VAR_DECL
3137 || !DECL_VIRTUAL_P (v)
3138 || !DECL_INITIAL (v)
3139 || DECL_INITIAL (v) == error_mark_node)
3140 return NULL_TREE;
3141 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3142 size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
3143 offset += token * size;
3144 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
3145 offset, size, vtable);
3146 if (!fn || integer_zerop (fn))
3147 return NULL_TREE;
3148 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3149 || TREE_CODE (fn) == FDESC_EXPR);
3150 fn = TREE_OPERAND (fn, 0);
3151 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3153 /* When the cgraph node is missing and the function is not public, we
3154 cannot devirtualize. This can happen in WHOPR when the actual method
3155 ends up in another partition, because we found the devirtualization
3156 possibility too late. */
3157 if (!can_refer_decl_in_current_unit_p (fn, vtable))
3158 return NULL_TREE;
3160 /* Make sure we create a cgraph node for functions we'll reference.
3161 They can be non-existent if the reference comes from an entry
3162 of an external vtable for example. */
3163 cgraph_get_create_node (fn);
3165 return fn;
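   /* A simplified illustration: for C++ source like

	struct A { virtual int f (); };
	int call (A *a) { return a->f (); }

      when analysis proves that *a has dynamic type A, the call becomes
      an OBJ_TYPE_REF with token 0 and KNOWN_BINFO for A; slot 0 of A's
      vtable initializer yields the FUNCTION_DECL of A::f, and the
      indirect call can be folded to a direct one.  */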
3168 /* Return true iff VAL is a gimple expression that is known to be
3169 non-negative. Restricted to floating-point inputs. */
3171 bool
3172 gimple_val_nonnegative_real_p (tree val)
3174 gimple def_stmt;
3176 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3178 /* Use existing logic for non-gimple trees. */
3179 if (tree_expr_nonnegative_p (val))
3180 return true;
3182 if (TREE_CODE (val) != SSA_NAME)
3183 return false;
3185 /* Currently we look only at the immediately defining statement
3186 to make this determination, since recursion on defining
3187 statements of operands can lead to quadratic behavior in the
3188 worst case. This is expected to catch almost all occurrences
3189 in practice. It would be possible to implement limited-depth
3190 recursion if important cases are lost. Alternatively, passes
3191 that need this information (such as the pow/powi lowering code
3192 in the cse_sincos pass) could be revised to provide it through
3193 dataflow propagation. */
3195 def_stmt = SSA_NAME_DEF_STMT (val);
3197 if (is_gimple_assign (def_stmt))
3199 tree op0, op1;
3201 /* See fold-const.c:tree_expr_nonnegative_p for additional
3202 cases that could be handled with recursion. */
3204 switch (gimple_assign_rhs_code (def_stmt))
3206 case ABS_EXPR:
3207 /* Always true for floating-point operands. */
3208 return true;
3210 case MULT_EXPR:
3211 /* True if the two operands are identical (since we are
3212 restricted to floating-point inputs). */
3213 op0 = gimple_assign_rhs1 (def_stmt);
3214 op1 = gimple_assign_rhs2 (def_stmt);
3216 if (op0 == op1
3217 || operand_equal_p (op0, op1, 0))
3218 return true;
3220 default:
3221 return false;
3224 else if (is_gimple_call (def_stmt))
3226 tree fndecl = gimple_call_fndecl (def_stmt);
3227 if (fndecl
3228 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3230 tree arg1;
3232 switch (DECL_FUNCTION_CODE (fndecl))
3234 CASE_FLT_FN (BUILT_IN_ACOS):
3235 CASE_FLT_FN (BUILT_IN_ACOSH):
3236 CASE_FLT_FN (BUILT_IN_CABS):
3237 CASE_FLT_FN (BUILT_IN_COSH):
3238 CASE_FLT_FN (BUILT_IN_ERFC):
3239 CASE_FLT_FN (BUILT_IN_EXP):
3240 CASE_FLT_FN (BUILT_IN_EXP10):
3241 CASE_FLT_FN (BUILT_IN_EXP2):
3242 CASE_FLT_FN (BUILT_IN_FABS):
3243 CASE_FLT_FN (BUILT_IN_FDIM):
3244 CASE_FLT_FN (BUILT_IN_HYPOT):
3245 CASE_FLT_FN (BUILT_IN_POW10):
3246 return true;
3248 CASE_FLT_FN (BUILT_IN_SQRT):
3249 /* sqrt(-0.0) is -0.0; sqrt is not defined over negative inputs
3250 and is nonnegative everywhere else. */
3251 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3252 return true;
3254 break;
3256 CASE_FLT_FN (BUILT_IN_POWI):
3257 /* True if the second argument is an even integer. */
3258 arg1 = gimple_call_arg (def_stmt, 1);
3260 if (TREE_CODE (arg1) == INTEGER_CST
3261 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3262 return true;
3264 break;
3266 CASE_FLT_FN (BUILT_IN_POW):
3267 /* True if the second argument is an even integer-valued
3268 real. */
3269 arg1 = gimple_call_arg (def_stmt, 1);
3271 if (TREE_CODE (arg1) == REAL_CST)
3273 REAL_VALUE_TYPE c;
3274 HOST_WIDE_INT n;
3276 c = TREE_REAL_CST (arg1);
3277 n = real_to_integer (&c);
3279 if ((n & 1) == 0)
3281 REAL_VALUE_TYPE cint;
3282 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3283 if (real_identical (&c, &cint))
3284 return true;
3288 break;
3290 default:
3291 return false;
3296 return false;
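   /* For illustration, gimple_val_nonnegative_real_p returns true for
      the SSA names defined by

	d_1 = __builtin_pow (x_2, 2.0);
	d_3 = __builtin_fabs (x_2);
	d_4 = x_2 * x_2;

      (even integer-valued exponent, fabs, and identical MULT_EXPR
      operands, respectively), and false for, say, d_5 = x_2 + 1.0,
      which falls through to the default case.  */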