2012-08-15 Janus Weil <janus@gcc.gnu.org>
[official-gcc.git] / gcc / gimple-fold.c
blobd708c80afa9ac748b227ce4142cf711faaa5bcda
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010, 2011, 2012 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "dumpfile.h"
29 #include "tree-flow.h"
30 #include "tree-ssa-propagate.h"
31 #include "target.h"
32 #include "gimple-fold.h"
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies constructor of variable DECL was taken
   from.  We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node snode;

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || !DECL_EXTERNAL (from_decl)
      || (flag_ltrans
	  && symtab_get_node (from_decl)->symbol.in_other_partition))
    return true;
  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;
  /* Weakrefs have somewhat confusing DECL_EXTERNAL flag set; they
     are always safe.  */
  if (DECL_EXTERNAL (decl)
      && lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && (!(snode = symtab_get_node (decl)) || !snode->symbol.in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference implies need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also when still gimplifying all referred comdat functions will be
     produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specifies that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;

  /* OK we are seeing either COMDAT or static variable.  In this case we must
     check that the definition is still around so we can refer it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have function body and that we didn't take
	 the decision to eliminate offline copy of the function yet.
	 The second is important when devirtualization happens during final
	 compilation stage when making a new reference no longer makes callee
	 to be compiled.  */
      if (!node || !node->analyzed || node->global.inlined_to)
	{
	  gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
	  return false;
	}
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->analyzed)
	{
	  gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
	  return false;
	}
    }
  return true;
}
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies variable whose constructor contains CVAL.
   Returns the canonicalized value, or NULL_TREE when the address base cannot
   be referenced from the current unit.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  STRIP_USELESS_TYPE_CONVERSION (cval);
  /* Rewrite PTR p+ CST into &MEM[ptr + CST] so the result is an ADDR_EXPR
     that is_gimple_min_invariant can accept.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base && TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Use the decl backing the compound literal as the base.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}

      if (!base)
	return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_get_create_node (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
    }
  return cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (const_value_known_p (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  val = canonicalize_constructor_val (val, sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  Returns the simplified
   reference or NULL_TREE if no simplification was possible.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  /* Fold unary/ternary wrappers around constants directly.  */
  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  /* Walk down to the base of the reference.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  /* Folding may drop the volatile flag; restore it before
	     recursing on the canonicalized form.  */
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					(TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return unshare_expr (get_symbol_constant_value (rhs));

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X;
		 X == 1 and X != 0 is X.  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));
	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
534 /* Attempt to fold a conditional statement. Return true if any changes were
535 made. We only attempt to fold the condition expression, and do not perform
536 any transformation that would require alteration of the cfg. It is
537 assumed that the operands have been previously folded. */
539 static bool
540 fold_gimple_cond (gimple stmt)
542 tree result = fold_binary_loc (gimple_location (stmt),
543 gimple_cond_code (stmt),
544 boolean_type_node,
545 gimple_cond_lhs (stmt),
546 gimple_cond_rhs (stmt));
548 if (result)
550 STRIP_USELESS_TYPE_CONVERSION (result);
551 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
553 gimple_cond_set_condition_from_tree (stmt, result);
554 return true;
558 return false;
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;
  struct gimplify_ctx gctx;
  gimple laststore;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (&gctx);
  gctx.into_ssa = gimple_in_ssa_p (cfun);

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_remove (si_p, true);
	  return;
	}
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  laststore = NULL;
  for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
    {
      new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence reuses the original statement's
	     VDEF; earlier stores get fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  reaching_vuse = gimple_vuse (stmt);
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  /* Maximum value: ARG itself must be a non-negative constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      /* For maximum lengths/values keep the larger of the two.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_maxval_strlen (rhs, length, visited, type);
	  }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    tree op2 = gimple_assign_rhs2 (def_stmt);
	    tree op3 = gimple_assign_rhs3 (def_stmt);
	    return get_maxval_strlen (op2, length, visited, type)
		   && get_maxval_strlen (op3, length, visited, type);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }
	}
	return true;

      default:
	return false;
    }
}
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.
     ARG_IDX selects the argument whose length/value we track; TYPE
     selects the get_maxval_strlen mode (0 = exact string length,
     1 = maximum string length, 2 = maximum value).  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	      fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2], ignore,
					   DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors).  */

tree
gimple_extract_devirt_binfo_from_cst (tree cst)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, expected_type, binfo;
  bool last_artificial = false;

  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  expected_type = TREE_TYPE (cst);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
	break;
      if (offset < 0)
	return NULL_TREE;

      /* Locate the field of TYPE covering OFFSET.  */
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL)
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_low_cst (DECL_SIZE (fld), 1);
	  if (pos <= offset && (pos + size) > offset)
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      offset -= pos;
    }
  /* Artificial sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;
  else
    return binfo;
}
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else
	{
	  /* Try devirtualizing through the static type of the object.  */
	  tree obj = OBJ_TYPE_REF_OBJECT (callee);
	  tree binfo = gimple_extract_devirt_binfo_from_cst (obj);
	  if (binfo)
	    {
	      HOST_WIDE_INT token
		= TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
	      tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
	      if (fndecl)
		{
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		}
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  callee = gimple_call_fndecl (stmt);
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);
      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
1139 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1140 distinguishes both cases. */
1142 static bool
1143 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1145 bool changed = false;
1146 gimple stmt = gsi_stmt (*gsi);
1147 unsigned i;
1148 gimple_stmt_iterator gsinext = *gsi;
1149 gimple next_stmt;
1151 gsi_next (&gsinext);
1152 next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);
1154 /* Fold the main computation performed by the statement. */
1155 switch (gimple_code (stmt))
1157 case GIMPLE_ASSIGN:
1159 unsigned old_num_ops = gimple_num_ops (stmt);
1160 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1161 tree lhs = gimple_assign_lhs (stmt);
1162 tree new_rhs;
1163 /* First canonicalize operand order. This avoids building new
1164 trees if this is the only thing fold would later do. */
1165 if ((commutative_tree_code (subcode)
1166 || commutative_ternary_tree_code (subcode))
1167 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1168 gimple_assign_rhs2 (stmt), false))
1170 tree tem = gimple_assign_rhs1 (stmt);
1171 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1172 gimple_assign_set_rhs2 (stmt, tem);
1173 changed = true;
1175 new_rhs = fold_gimple_assign (gsi);
1176 if (new_rhs
1177 && !useless_type_conversion_p (TREE_TYPE (lhs),
1178 TREE_TYPE (new_rhs)))
1179 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1180 if (new_rhs
1181 && (!inplace
1182 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1184 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1185 changed = true;
1187 break;
1190 case GIMPLE_COND:
1191 changed |= fold_gimple_cond (stmt);
1192 break;
1194 case GIMPLE_CALL:
1195 changed |= gimple_fold_call (gsi, inplace);
1196 break;
1198 case GIMPLE_ASM:
1199 /* Fold *& in asm operands. */
1201 size_t noutputs;
1202 const char **oconstraints;
1203 const char *constraint;
1204 bool allows_mem, allows_reg;
1206 noutputs = gimple_asm_noutputs (stmt);
1207 oconstraints = XALLOCAVEC (const char *, noutputs);
1209 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1211 tree link = gimple_asm_output_op (stmt, i);
1212 tree op = TREE_VALUE (link);
1213 oconstraints[i]
1214 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1215 if (REFERENCE_CLASS_P (op)
1216 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1218 TREE_VALUE (link) = op;
1219 changed = true;
1222 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1224 tree link = gimple_asm_input_op (stmt, i);
1225 tree op = TREE_VALUE (link);
1226 constraint
1227 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1228 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1229 oconstraints, &allows_mem, &allows_reg);
1230 if (REFERENCE_CLASS_P (op)
1231 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1232 != NULL_TREE)
1234 TREE_VALUE (link) = op;
1235 changed = true;
1239 break;
1241 case GIMPLE_DEBUG:
1242 if (gimple_debug_bind_p (stmt))
1244 tree val = gimple_debug_bind_get_value (stmt);
1245 if (val
1246 && REFERENCE_CLASS_P (val))
1248 tree tem = maybe_fold_reference (val, false);
1249 if (tem)
1251 gimple_debug_bind_set_value (stmt, tem);
1252 changed = true;
1255 else if (val
1256 && TREE_CODE (val) == ADDR_EXPR)
1258 tree ref = TREE_OPERAND (val, 0);
1259 tree tem = maybe_fold_reference (ref, false);
1260 if (tem)
1262 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
1263 gimple_debug_bind_set_value (stmt, tem);
1264 changed = true;
1268 break;
1270 default:;
1273 /* If stmt folds into nothing and it was the last stmt in a bb,
1274 don't call gsi_stmt. */
1275 if (gsi_end_p (*gsi))
1277 gcc_assert (next_stmt == NULL);
1278 return changed;
1281 stmt = gsi_stmt (*gsi);
1283 /* Fold *& on the lhs. Don't do this if stmt folded into nothing,
1284 as we'd changing the next stmt. */
1285 if (gimple_has_lhs (stmt) && stmt != next_stmt)
1287 tree lhs = gimple_get_lhs (stmt);
1288 if (lhs && REFERENCE_CLASS_P (lhs))
1290 tree new_lhs = maybe_fold_reference (lhs, true);
1291 if (new_lhs)
1293 gimple_set_lhs (stmt, new_lhs);
1294 changed = true;
1299 return changed;
1302 /* Fold the statement pointed to by GSI. In some cases, this function may
1303 replace the whole statement with a new one. Returns true iff folding
1304 makes any changes.
1305 The statement pointed to by GSI should be in valid gimple form but may
1306 be in unfolded state as resulting from for example constant propagation
1307 which can produce *&x = 0. */
1309 bool
1310 fold_stmt (gimple_stmt_iterator *gsi)
1312 return fold_stmt_1 (gsi, false);
1315 /* Perform the minimal folding on statement *GSI. Only operations like
1316 *&x created by constant propagation are handled. The statement cannot
1317 be replaced with a new one. Return true if the statement was
1318 changed, false otherwise.
1319 The statement *GSI should be in valid gimple form but may
1320 be in unfolded state as resulting from for example constant propagation
1321 which can produce *&x = 0. */
1323 bool
1324 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1326 gimple stmt = gsi_stmt (*gsi);
1327 bool changed = fold_stmt_1 (gsi, true);
1328 gcc_assert (gsi_stmt (*gsi) == stmt);
1329 return changed;
1332 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1333 if EXPR is null or we don't know how.
1334 If non-null, the result always has boolean type. */
1336 static tree
1337 canonicalize_bool (tree expr, bool invert)
1339 if (!expr)
1340 return NULL_TREE;
1341 else if (invert)
1343 if (integer_nonzerop (expr))
1344 return boolean_false_node;
1345 else if (integer_zerop (expr))
1346 return boolean_true_node;
1347 else if (TREE_CODE (expr) == SSA_NAME)
1348 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1349 build_int_cst (TREE_TYPE (expr), 0));
1350 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1351 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1352 boolean_type_node,
1353 TREE_OPERAND (expr, 0),
1354 TREE_OPERAND (expr, 1));
1355 else
1356 return NULL_TREE;
1358 else
1360 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1361 return expr;
1362 if (integer_nonzerop (expr))
1363 return boolean_true_node;
1364 else if (integer_zerop (expr))
1365 return boolean_false_node;
1366 else if (TREE_CODE (expr) == SSA_NAME)
1367 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1368 build_int_cst (TREE_TYPE (expr), 0));
1369 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1370 return fold_build2 (TREE_CODE (expr),
1371 boolean_type_node,
1372 TREE_OPERAND (expr, 0),
1373 TREE_OPERAND (expr, 1));
1374 else
1375 return NULL_TREE;
1379 /* Check to see if a boolean expression EXPR is logically equivalent to the
1380 comparison (OP1 CODE OP2). Check for various identities involving
1381 SSA_NAMEs. */
1383 static bool
1384 same_bool_comparison_p (const_tree expr, enum tree_code code,
1385 const_tree op1, const_tree op2)
1387 gimple s;
1389 /* The obvious case. */
1390 if (TREE_CODE (expr) == code
1391 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1392 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1393 return true;
1395 /* Check for comparing (name, name != 0) and the case where expr
1396 is an SSA_NAME with a definition matching the comparison. */
1397 if (TREE_CODE (expr) == SSA_NAME
1398 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1400 if (operand_equal_p (expr, op1, 0))
1401 return ((code == NE_EXPR && integer_zerop (op2))
1402 || (code == EQ_EXPR && integer_nonzerop (op2)));
1403 s = SSA_NAME_DEF_STMT (expr);
1404 if (is_gimple_assign (s)
1405 && gimple_assign_rhs_code (s) == code
1406 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1407 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1408 return true;
1411 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1412 of name is a comparison, recurse. */
1413 if (TREE_CODE (op1) == SSA_NAME
1414 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1416 s = SSA_NAME_DEF_STMT (op1);
1417 if (is_gimple_assign (s)
1418 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1420 enum tree_code c = gimple_assign_rhs_code (s);
1421 if ((c == NE_EXPR && integer_zerop (op2))
1422 || (c == EQ_EXPR && integer_nonzerop (op2)))
1423 return same_bool_comparison_p (expr, c,
1424 gimple_assign_rhs1 (s),
1425 gimple_assign_rhs2 (s));
1426 if ((c == EQ_EXPR && integer_zerop (op2))
1427 || (c == NE_EXPR && integer_nonzerop (op2)))
1428 return same_bool_comparison_p (expr,
1429 invert_tree_comparison (c, false),
1430 gimple_assign_rhs1 (s),
1431 gimple_assign_rhs2 (s));
1434 return false;
1437 /* Check to see if two boolean expressions OP1 and OP2 are logically
1438 equivalent. */
1440 static bool
1441 same_bool_result_p (const_tree op1, const_tree op2)
1443 /* Simple cases first. */
1444 if (operand_equal_p (op1, op2, 0))
1445 return true;
1447 /* Check the cases where at least one of the operands is a comparison.
1448 These are a bit smarter than operand_equal_p in that they apply some
1449 identifies on SSA_NAMEs. */
1450 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1451 && same_bool_comparison_p (op1, TREE_CODE (op2),
1452 TREE_OPERAND (op2, 0),
1453 TREE_OPERAND (op2, 1)))
1454 return true;
1455 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1456 && same_bool_comparison_p (op2, TREE_CODE (op1),
1457 TREE_OPERAND (op1, 0),
1458 TREE_OPERAND (op1, 1)))
1459 return true;
1461 /* Default case. */
1462 return false;
1465 /* Forward declarations for some mutually recursive functions. */
1467 static tree
1468 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1469 enum tree_code code2, tree op2a, tree op2b);
1470 static tree
1471 and_var_with_comparison (tree var, bool invert,
1472 enum tree_code code2, tree op2a, tree op2b);
1473 static tree
1474 and_var_with_comparison_1 (gimple stmt,
1475 enum tree_code code2, tree op2a, tree op2b);
1476 static tree
1477 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1478 enum tree_code code2, tree op2a, tree op2b);
1479 static tree
1480 or_var_with_comparison (tree var, bool invert,
1481 enum tree_code code2, tree op2a, tree op2b);
1482 static tree
1483 or_var_with_comparison_1 (gimple stmt,
1484 enum tree_code code2, tree op2a, tree op2b);
1486 /* Helper function for and_comparisons_1: try to simplify the AND of the
1487 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1488 If INVERT is true, invert the value of the VAR before doing the AND.
1489 Return NULL_EXPR if we can't simplify this to a single expression. */
1491 static tree
1492 and_var_with_comparison (tree var, bool invert,
1493 enum tree_code code2, tree op2a, tree op2b)
1495 tree t;
1496 gimple stmt = SSA_NAME_DEF_STMT (var);
1498 /* We can only deal with variables whose definitions are assignments. */
1499 if (!is_gimple_assign (stmt))
1500 return NULL_TREE;
1502 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1503 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1504 Then we only have to consider the simpler non-inverted cases. */
1505 if (invert)
1506 t = or_var_with_comparison_1 (stmt,
1507 invert_tree_comparison (code2, false),
1508 op2a, op2b);
1509 else
1510 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1511 return canonicalize_bool (t, invert);
1514 /* Try to simplify the AND of the ssa variable defined by the assignment
1515 STMT with the comparison specified by (OP2A CODE2 OP2B).
1516 Return NULL_EXPR if we can't simplify this to a single expression. */
1518 static tree
1519 and_var_with_comparison_1 (gimple stmt,
1520 enum tree_code code2, tree op2a, tree op2b)
1522 tree var = gimple_assign_lhs (stmt);
1523 tree true_test_var = NULL_TREE;
1524 tree false_test_var = NULL_TREE;
1525 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1527 /* Check for identities like (var AND (var == 0)) => false. */
1528 if (TREE_CODE (op2a) == SSA_NAME
1529 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1531 if ((code2 == NE_EXPR && integer_zerop (op2b))
1532 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1534 true_test_var = op2a;
1535 if (var == true_test_var)
1536 return var;
1538 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1539 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1541 false_test_var = op2a;
1542 if (var == false_test_var)
1543 return boolean_false_node;
1547 /* If the definition is a comparison, recurse on it. */
1548 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1550 tree t = and_comparisons_1 (innercode,
1551 gimple_assign_rhs1 (stmt),
1552 gimple_assign_rhs2 (stmt),
1553 code2,
1554 op2a,
1555 op2b);
1556 if (t)
1557 return t;
1560 /* If the definition is an AND or OR expression, we may be able to
1561 simplify by reassociating. */
1562 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1563 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1565 tree inner1 = gimple_assign_rhs1 (stmt);
1566 tree inner2 = gimple_assign_rhs2 (stmt);
1567 gimple s;
1568 tree t;
1569 tree partial = NULL_TREE;
1570 bool is_and = (innercode == BIT_AND_EXPR);
1572 /* Check for boolean identities that don't require recursive examination
1573 of inner1/inner2:
1574 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1575 inner1 AND (inner1 OR inner2) => inner1
1576 !inner1 AND (inner1 AND inner2) => false
1577 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1578 Likewise for similar cases involving inner2. */
1579 if (inner1 == true_test_var)
1580 return (is_and ? var : inner1);
1581 else if (inner2 == true_test_var)
1582 return (is_and ? var : inner2);
1583 else if (inner1 == false_test_var)
1584 return (is_and
1585 ? boolean_false_node
1586 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1587 else if (inner2 == false_test_var)
1588 return (is_and
1589 ? boolean_false_node
1590 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1592 /* Next, redistribute/reassociate the AND across the inner tests.
1593 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1594 if (TREE_CODE (inner1) == SSA_NAME
1595 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1596 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1597 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1598 gimple_assign_rhs1 (s),
1599 gimple_assign_rhs2 (s),
1600 code2, op2a, op2b)))
1602 /* Handle the AND case, where we are reassociating:
1603 (inner1 AND inner2) AND (op2a code2 op2b)
1604 => (t AND inner2)
1605 If the partial result t is a constant, we win. Otherwise
1606 continue on to try reassociating with the other inner test. */
1607 if (is_and)
1609 if (integer_onep (t))
1610 return inner2;
1611 else if (integer_zerop (t))
1612 return boolean_false_node;
1615 /* Handle the OR case, where we are redistributing:
1616 (inner1 OR inner2) AND (op2a code2 op2b)
1617 => (t OR (inner2 AND (op2a code2 op2b))) */
1618 else if (integer_onep (t))
1619 return boolean_true_node;
1621 /* Save partial result for later. */
1622 partial = t;
1625 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1626 if (TREE_CODE (inner2) == SSA_NAME
1627 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1628 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1629 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1630 gimple_assign_rhs1 (s),
1631 gimple_assign_rhs2 (s),
1632 code2, op2a, op2b)))
1634 /* Handle the AND case, where we are reassociating:
1635 (inner1 AND inner2) AND (op2a code2 op2b)
1636 => (inner1 AND t) */
1637 if (is_and)
1639 if (integer_onep (t))
1640 return inner1;
1641 else if (integer_zerop (t))
1642 return boolean_false_node;
1643 /* If both are the same, we can apply the identity
1644 (x AND x) == x. */
1645 else if (partial && same_bool_result_p (t, partial))
1646 return t;
1649 /* Handle the OR case. where we are redistributing:
1650 (inner1 OR inner2) AND (op2a code2 op2b)
1651 => (t OR (inner1 AND (op2a code2 op2b)))
1652 => (t OR partial) */
1653 else
1655 if (integer_onep (t))
1656 return boolean_true_node;
1657 else if (partial)
1659 /* We already got a simplification for the other
1660 operand to the redistributed OR expression. The
1661 interesting case is when at least one is false.
1662 Or, if both are the same, we can apply the identity
1663 (x OR x) == x. */
1664 if (integer_zerop (partial))
1665 return t;
1666 else if (integer_zerop (t))
1667 return partial;
1668 else if (same_bool_result_p (t, partial))
1669 return t;
1674 return NULL_TREE;
1677 /* Try to simplify the AND of two comparisons defined by
1678 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1679 If this can be done without constructing an intermediate value,
1680 return the resulting tree; otherwise NULL_TREE is returned.
1681 This function is deliberately asymmetric as it recurses on SSA_DEFs
1682 in the first comparison but not the second. */
1684 static tree
1685 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1686 enum tree_code code2, tree op2a, tree op2b)
1688 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1689 if (operand_equal_p (op1a, op2a, 0)
1690 && operand_equal_p (op1b, op2b, 0))
1692 /* Result will be either NULL_TREE, or a combined comparison. */
1693 tree t = combine_comparisons (UNKNOWN_LOCATION,
1694 TRUTH_ANDIF_EXPR, code1, code2,
1695 boolean_type_node, op1a, op1b);
1696 if (t)
1697 return t;
1700 /* Likewise the swapped case of the above. */
1701 if (operand_equal_p (op1a, op2b, 0)
1702 && operand_equal_p (op1b, op2a, 0))
1704 /* Result will be either NULL_TREE, or a combined comparison. */
1705 tree t = combine_comparisons (UNKNOWN_LOCATION,
1706 TRUTH_ANDIF_EXPR, code1,
1707 swap_tree_comparison (code2),
1708 boolean_type_node, op1a, op1b);
1709 if (t)
1710 return t;
1713 /* If both comparisons are of the same value against constants, we might
1714 be able to merge them. */
1715 if (operand_equal_p (op1a, op2a, 0)
1716 && TREE_CODE (op1b) == INTEGER_CST
1717 && TREE_CODE (op2b) == INTEGER_CST)
1719 int cmp = tree_int_cst_compare (op1b, op2b);
1721 /* If we have (op1a == op1b), we should either be able to
1722 return that or FALSE, depending on whether the constant op1b
1723 also satisfies the other comparison against op2b. */
1724 if (code1 == EQ_EXPR)
1726 bool done = true;
1727 bool val;
1728 switch (code2)
1730 case EQ_EXPR: val = (cmp == 0); break;
1731 case NE_EXPR: val = (cmp != 0); break;
1732 case LT_EXPR: val = (cmp < 0); break;
1733 case GT_EXPR: val = (cmp > 0); break;
1734 case LE_EXPR: val = (cmp <= 0); break;
1735 case GE_EXPR: val = (cmp >= 0); break;
1736 default: done = false;
1738 if (done)
1740 if (val)
1741 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1742 else
1743 return boolean_false_node;
1746 /* Likewise if the second comparison is an == comparison. */
1747 else if (code2 == EQ_EXPR)
1749 bool done = true;
1750 bool val;
1751 switch (code1)
1753 case EQ_EXPR: val = (cmp == 0); break;
1754 case NE_EXPR: val = (cmp != 0); break;
1755 case LT_EXPR: val = (cmp > 0); break;
1756 case GT_EXPR: val = (cmp < 0); break;
1757 case LE_EXPR: val = (cmp >= 0); break;
1758 case GE_EXPR: val = (cmp <= 0); break;
1759 default: done = false;
1761 if (done)
1763 if (val)
1764 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1765 else
1766 return boolean_false_node;
1770 /* Same business with inequality tests. */
1771 else if (code1 == NE_EXPR)
1773 bool val;
1774 switch (code2)
1776 case EQ_EXPR: val = (cmp != 0); break;
1777 case NE_EXPR: val = (cmp == 0); break;
1778 case LT_EXPR: val = (cmp >= 0); break;
1779 case GT_EXPR: val = (cmp <= 0); break;
1780 case LE_EXPR: val = (cmp > 0); break;
1781 case GE_EXPR: val = (cmp < 0); break;
1782 default:
1783 val = false;
1785 if (val)
1786 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1788 else if (code2 == NE_EXPR)
1790 bool val;
1791 switch (code1)
1793 case EQ_EXPR: val = (cmp == 0); break;
1794 case NE_EXPR: val = (cmp != 0); break;
1795 case LT_EXPR: val = (cmp <= 0); break;
1796 case GT_EXPR: val = (cmp >= 0); break;
1797 case LE_EXPR: val = (cmp < 0); break;
1798 case GE_EXPR: val = (cmp > 0); break;
1799 default:
1800 val = false;
1802 if (val)
1803 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1806 /* Chose the more restrictive of two < or <= comparisons. */
1807 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1808 && (code2 == LT_EXPR || code2 == LE_EXPR))
1810 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1811 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1812 else
1813 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1816 /* Likewise chose the more restrictive of two > or >= comparisons. */
1817 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1818 && (code2 == GT_EXPR || code2 == GE_EXPR))
1820 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1821 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1822 else
1823 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1826 /* Check for singleton ranges. */
1827 else if (cmp == 0
1828 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1829 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1830 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1832 /* Check for disjoint ranges. */
1833 else if (cmp <= 0
1834 && (code1 == LT_EXPR || code1 == LE_EXPR)
1835 && (code2 == GT_EXPR || code2 == GE_EXPR))
1836 return boolean_false_node;
1837 else if (cmp >= 0
1838 && (code1 == GT_EXPR || code1 == GE_EXPR)
1839 && (code2 == LT_EXPR || code2 == LE_EXPR))
1840 return boolean_false_node;
1843 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1844 NAME's definition is a truth value. See if there are any simplifications
1845 that can be done against the NAME's definition. */
1846 if (TREE_CODE (op1a) == SSA_NAME
1847 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1848 && (integer_zerop (op1b) || integer_onep (op1b)))
1850 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1851 || (code1 == NE_EXPR && integer_onep (op1b)));
1852 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1853 switch (gimple_code (stmt))
1855 case GIMPLE_ASSIGN:
1856 /* Try to simplify by copy-propagating the definition. */
1857 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1859 case GIMPLE_PHI:
1860 /* If every argument to the PHI produces the same result when
1861 ANDed with the second comparison, we win.
1862 Do not do this unless the type is bool since we need a bool
1863 result here anyway. */
1864 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1866 tree result = NULL_TREE;
1867 unsigned i;
1868 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1870 tree arg = gimple_phi_arg_def (stmt, i);
1872 /* If this PHI has itself as an argument, ignore it.
1873 If all the other args produce the same result,
1874 we're still OK. */
1875 if (arg == gimple_phi_result (stmt))
1876 continue;
1877 else if (TREE_CODE (arg) == INTEGER_CST)
1879 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1881 if (!result)
1882 result = boolean_false_node;
1883 else if (!integer_zerop (result))
1884 return NULL_TREE;
1886 else if (!result)
1887 result = fold_build2 (code2, boolean_type_node,
1888 op2a, op2b);
1889 else if (!same_bool_comparison_p (result,
1890 code2, op2a, op2b))
1891 return NULL_TREE;
1893 else if (TREE_CODE (arg) == SSA_NAME
1894 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1896 tree temp;
1897 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1898 /* In simple cases we can look through PHI nodes,
1899 but we have to be careful with loops.
1900 See PR49073. */
1901 if (! dom_info_available_p (CDI_DOMINATORS)
1902 || gimple_bb (def_stmt) == gimple_bb (stmt)
1903 || dominated_by_p (CDI_DOMINATORS,
1904 gimple_bb (def_stmt),
1905 gimple_bb (stmt)))
1906 return NULL_TREE;
1907 temp = and_var_with_comparison (arg, invert, code2,
1908 op2a, op2b);
1909 if (!temp)
1910 return NULL_TREE;
1911 else if (!result)
1912 result = temp;
1913 else if (!same_bool_result_p (result, temp))
1914 return NULL_TREE;
1916 else
1917 return NULL_TREE;
1919 return result;
1922 default:
1923 break;
1926 return NULL_TREE;
1929 /* Try to simplify the AND of two comparisons, specified by
1930 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1931 If this can be simplified to a single expression (without requiring
1932 introducing more SSA variables to hold intermediate values),
1933 return the resulting tree. Otherwise return NULL_TREE.
1934 If the result expression is non-null, it has boolean type. */
1936 tree
1937 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1938 enum tree_code code2, tree op2a, tree op2b)
1940 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1941 if (t)
1942 return t;
1943 else
1944 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1947 /* Helper function for or_comparisons_1: try to simplify the OR of the
1948 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1949 If INVERT is true, invert the value of VAR before doing the OR.
1950 Return NULL_EXPR if we can't simplify this to a single expression. */
1952 static tree
1953 or_var_with_comparison (tree var, bool invert,
1954 enum tree_code code2, tree op2a, tree op2b)
1956 tree t;
1957 gimple stmt = SSA_NAME_DEF_STMT (var);
1959 /* We can only deal with variables whose definitions are assignments. */
1960 if (!is_gimple_assign (stmt))
1961 return NULL_TREE;
1963 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1964 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1965 Then we only have to consider the simpler non-inverted cases. */
1966 if (invert)
1967 t = and_var_with_comparison_1 (stmt,
1968 invert_tree_comparison (code2, false),
1969 op2a, op2b);
1970 else
1971 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
1972 return canonicalize_bool (t, invert);
1975 /* Try to simplify the OR of the ssa variable defined by the assignment
1976 STMT with the comparison specified by (OP2A CODE2 OP2B).
1977 Return NULL_EXPR if we can't simplify this to a single expression. */
1979 static tree
1980 or_var_with_comparison_1 (gimple stmt,
1981 enum tree_code code2, tree op2a, tree op2b)
1983 tree var = gimple_assign_lhs (stmt);
1984 tree true_test_var = NULL_TREE;
1985 tree false_test_var = NULL_TREE;
1986 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1988 /* Check for identities like (var OR (var != 0)) => true . */
1989 if (TREE_CODE (op2a) == SSA_NAME
1990 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1992 if ((code2 == NE_EXPR && integer_zerop (op2b))
1993 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1995 true_test_var = op2a;
1996 if (var == true_test_var)
1997 return var;
1999 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2000 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2002 false_test_var = op2a;
2003 if (var == false_test_var)
2004 return boolean_true_node;
2008 /* If the definition is a comparison, recurse on it. */
2009 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2011 tree t = or_comparisons_1 (innercode,
2012 gimple_assign_rhs1 (stmt),
2013 gimple_assign_rhs2 (stmt),
2014 code2,
2015 op2a,
2016 op2b);
2017 if (t)
2018 return t;
2021 /* If the definition is an AND or OR expression, we may be able to
2022 simplify by reassociating. */
2023 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2024 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2026 tree inner1 = gimple_assign_rhs1 (stmt);
2027 tree inner2 = gimple_assign_rhs2 (stmt);
2028 gimple s;
2029 tree t;
2030 tree partial = NULL_TREE;
2031 bool is_or = (innercode == BIT_IOR_EXPR);
2033 /* Check for boolean identities that don't require recursive examination
2034 of inner1/inner2:
2035 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2036 inner1 OR (inner1 AND inner2) => inner1
2037 !inner1 OR (inner1 OR inner2) => true
2038 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2040 if (inner1 == true_test_var)
2041 return (is_or ? var : inner1);
2042 else if (inner2 == true_test_var)
2043 return (is_or ? var : inner2);
2044 else if (inner1 == false_test_var)
2045 return (is_or
2046 ? boolean_true_node
2047 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2048 else if (inner2 == false_test_var)
2049 return (is_or
2050 ? boolean_true_node
2051 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2053 /* Next, redistribute/reassociate the OR across the inner tests.
2054 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2055 if (TREE_CODE (inner1) == SSA_NAME
2056 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2057 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2058 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2059 gimple_assign_rhs1 (s),
2060 gimple_assign_rhs2 (s),
2061 code2, op2a, op2b)))
2063 /* Handle the OR case, where we are reassociating:
2064 (inner1 OR inner2) OR (op2a code2 op2b)
2065 => (t OR inner2)
2066 If the partial result t is a constant, we win. Otherwise
2067 continue on to try reassociating with the other inner test. */
2068 if (is_or)
2070 if (integer_onep (t))
2071 return boolean_true_node;
2072 else if (integer_zerop (t))
2073 return inner2;
2076 /* Handle the AND case, where we are redistributing:
2077 (inner1 AND inner2) OR (op2a code2 op2b)
2078 => (t AND (inner2 OR (op2a code op2b))) */
2079 else if (integer_zerop (t))
2080 return boolean_false_node;
2082 /* Save partial result for later. */
2083 partial = t;
2086 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2087 if (TREE_CODE (inner2) == SSA_NAME
2088 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2089 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2090 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2091 gimple_assign_rhs1 (s),
2092 gimple_assign_rhs2 (s),
2093 code2, op2a, op2b)))
2095 /* Handle the OR case, where we are reassociating:
2096 (inner1 OR inner2) OR (op2a code2 op2b)
2097 => (inner1 OR t)
2098 => (t OR partial) */
2099 if (is_or)
2101 if (integer_zerop (t))
2102 return inner1;
2103 else if (integer_onep (t))
2104 return boolean_true_node;
2105 /* If both are the same, we can apply the identity
2106 (x OR x) == x. */
2107 else if (partial && same_bool_result_p (t, partial))
2108 return t;
2111 /* Handle the AND case, where we are redistributing:
2112 (inner1 AND inner2) OR (op2a code2 op2b)
2113 => (t AND (inner1 OR (op2a code2 op2b)))
2114 => (t AND partial) */
2115 else
2117 if (integer_zerop (t))
2118 return boolean_false_node;
2119 else if (partial)
2121 /* We already got a simplification for the other
2122 operand to the redistributed AND expression. The
2123 interesting case is when at least one is true.
2124 Or, if both are the same, we can apply the identity
2125 (x AND x) == x. */
2126 if (integer_onep (partial))
2127 return t;
2128 else if (integer_onep (t))
2129 return partial;
2130 else if (same_bool_result_p (t, partial))
2131 return t;
2136 return NULL_TREE;
2139 /* Try to simplify the OR of two comparisons defined by
2140 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2141 If this can be done without constructing an intermediate value,
2142 return the resulting tree; otherwise NULL_TREE is returned.
2143 This function is deliberately asymmetric as it recurses on SSA_DEFs
2144 in the first comparison but not the second. */
2146 static tree
2147 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2148 enum tree_code code2, tree op2a, tree op2b)
2150 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2151 if (operand_equal_p (op1a, op2a, 0)
2152 && operand_equal_p (op1b, op2b, 0))
2154 /* Result will be either NULL_TREE, or a combined comparison. */
2155 tree t = combine_comparisons (UNKNOWN_LOCATION,
2156 TRUTH_ORIF_EXPR, code1, code2,
2157 boolean_type_node, op1a, op1b);
2158 if (t)
2159 return t;
2162 /* Likewise the swapped case of the above. */
2163 if (operand_equal_p (op1a, op2b, 0)
2164 && operand_equal_p (op1b, op2a, 0))
2166 /* Result will be either NULL_TREE, or a combined comparison. */
2167 tree t = combine_comparisons (UNKNOWN_LOCATION,
2168 TRUTH_ORIF_EXPR, code1,
2169 swap_tree_comparison (code2),
2170 boolean_type_node, op1a, op1b);
2171 if (t)
2172 return t;
2175 /* If both comparisons are of the same value against constants, we might
2176 be able to merge them. */
2177 if (operand_equal_p (op1a, op2a, 0)
2178 && TREE_CODE (op1b) == INTEGER_CST
2179 && TREE_CODE (op2b) == INTEGER_CST)
2181 int cmp = tree_int_cst_compare (op1b, op2b);
2183 /* If we have (op1a != op1b), we should either be able to
2184 return that or TRUE, depending on whether the constant op1b
2185 also satisfies the other comparison against op2b. */
2186 if (code1 == NE_EXPR)
2188 bool done = true;
2189 bool val;
2190 switch (code2)
2192 case EQ_EXPR: val = (cmp == 0); break;
2193 case NE_EXPR: val = (cmp != 0); break;
2194 case LT_EXPR: val = (cmp < 0); break;
2195 case GT_EXPR: val = (cmp > 0); break;
2196 case LE_EXPR: val = (cmp <= 0); break;
2197 case GE_EXPR: val = (cmp >= 0); break;
2198 default: done = false;
2200 if (done)
2202 if (val)
2203 return boolean_true_node;
2204 else
2205 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2208 /* Likewise if the second comparison is a != comparison. */
2209 else if (code2 == NE_EXPR)
2211 bool done = true;
2212 bool val;
2213 switch (code1)
2215 case EQ_EXPR: val = (cmp == 0); break;
2216 case NE_EXPR: val = (cmp != 0); break;
2217 case LT_EXPR: val = (cmp > 0); break;
2218 case GT_EXPR: val = (cmp < 0); break;
2219 case LE_EXPR: val = (cmp >= 0); break;
2220 case GE_EXPR: val = (cmp <= 0); break;
2221 default: done = false;
2223 if (done)
2225 if (val)
2226 return boolean_true_node;
2227 else
2228 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2232 /* See if an equality test is redundant with the other comparison. */
2233 else if (code1 == EQ_EXPR)
2235 bool val;
2236 switch (code2)
2238 case EQ_EXPR: val = (cmp == 0); break;
2239 case NE_EXPR: val = (cmp != 0); break;
2240 case LT_EXPR: val = (cmp < 0); break;
2241 case GT_EXPR: val = (cmp > 0); break;
2242 case LE_EXPR: val = (cmp <= 0); break;
2243 case GE_EXPR: val = (cmp >= 0); break;
2244 default:
2245 val = false;
2247 if (val)
2248 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2250 else if (code2 == EQ_EXPR)
2252 bool val;
2253 switch (code1)
2255 case EQ_EXPR: val = (cmp == 0); break;
2256 case NE_EXPR: val = (cmp != 0); break;
2257 case LT_EXPR: val = (cmp > 0); break;
2258 case GT_EXPR: val = (cmp < 0); break;
2259 case LE_EXPR: val = (cmp >= 0); break;
2260 case GE_EXPR: val = (cmp <= 0); break;
2261 default:
2262 val = false;
2264 if (val)
2265 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2268 /* Chose the less restrictive of two < or <= comparisons. */
2269 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2270 && (code2 == LT_EXPR || code2 == LE_EXPR))
2272 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2273 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2274 else
2275 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2278 /* Likewise chose the less restrictive of two > or >= comparisons. */
2279 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2280 && (code2 == GT_EXPR || code2 == GE_EXPR))
2282 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2283 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2284 else
2285 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2288 /* Check for singleton ranges. */
2289 else if (cmp == 0
2290 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2291 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2292 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2294 /* Check for less/greater pairs that don't restrict the range at all. */
2295 else if (cmp >= 0
2296 && (code1 == LT_EXPR || code1 == LE_EXPR)
2297 && (code2 == GT_EXPR || code2 == GE_EXPR))
2298 return boolean_true_node;
2299 else if (cmp <= 0
2300 && (code1 == GT_EXPR || code1 == GE_EXPR)
2301 && (code2 == LT_EXPR || code2 == LE_EXPR))
2302 return boolean_true_node;
2305 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2306 NAME's definition is a truth value. See if there are any simplifications
2307 that can be done against the NAME's definition. */
2308 if (TREE_CODE (op1a) == SSA_NAME
2309 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2310 && (integer_zerop (op1b) || integer_onep (op1b)))
2312 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2313 || (code1 == NE_EXPR && integer_onep (op1b)));
2314 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2315 switch (gimple_code (stmt))
2317 case GIMPLE_ASSIGN:
2318 /* Try to simplify by copy-propagating the definition. */
2319 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2321 case GIMPLE_PHI:
2322 /* If every argument to the PHI produces the same result when
2323 ORed with the second comparison, we win.
2324 Do not do this unless the type is bool since we need a bool
2325 result here anyway. */
2326 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2328 tree result = NULL_TREE;
2329 unsigned i;
2330 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2332 tree arg = gimple_phi_arg_def (stmt, i);
2334 /* If this PHI has itself as an argument, ignore it.
2335 If all the other args produce the same result,
2336 we're still OK. */
2337 if (arg == gimple_phi_result (stmt))
2338 continue;
2339 else if (TREE_CODE (arg) == INTEGER_CST)
2341 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2343 if (!result)
2344 result = boolean_true_node;
2345 else if (!integer_onep (result))
2346 return NULL_TREE;
2348 else if (!result)
2349 result = fold_build2 (code2, boolean_type_node,
2350 op2a, op2b);
2351 else if (!same_bool_comparison_p (result,
2352 code2, op2a, op2b))
2353 return NULL_TREE;
2355 else if (TREE_CODE (arg) == SSA_NAME
2356 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2358 tree temp;
2359 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2360 /* In simple cases we can look through PHI nodes,
2361 but we have to be careful with loops.
2362 See PR49073. */
2363 if (! dom_info_available_p (CDI_DOMINATORS)
2364 || gimple_bb (def_stmt) == gimple_bb (stmt)
2365 || dominated_by_p (CDI_DOMINATORS,
2366 gimple_bb (def_stmt),
2367 gimple_bb (stmt)))
2368 return NULL_TREE;
2369 temp = or_var_with_comparison (arg, invert, code2,
2370 op2a, op2b);
2371 if (!temp)
2372 return NULL_TREE;
2373 else if (!result)
2374 result = temp;
2375 else if (!same_bool_result_p (result, temp))
2376 return NULL_TREE;
2378 else
2379 return NULL_TREE;
2381 return result;
2384 default:
2385 break;
2388 return NULL_TREE;
2391 /* Try to simplify the OR of two comparisons, specified by
2392 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2393 If this can be simplified to a single expression (without requiring
2394 introducing more SSA variables to hold intermediate values),
2395 return the resulting tree. Otherwise return NULL_TREE.
2396 If the result expression is non-null, it has boolean type. */
2398 tree
2399 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2400 enum tree_code code2, tree op2a, tree op2b)
2402 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2403 if (t)
2404 return t;
2405 else
2406 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2410 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2412 Either NULL_TREE, a simplified but non-constant or a constant
2413 is returned.
2415 ??? This should go into a gimple-fold-inline.h file to be eventually
2416 privatized with the single valueize function used in the various TUs
2417 to avoid the indirect function call overhead. */
2419 tree
2420 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2422 location_t loc = gimple_location (stmt);
2423 switch (gimple_code (stmt))
2425 case GIMPLE_ASSIGN:
2427 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2429 switch (get_gimple_rhs_class (subcode))
2431 case GIMPLE_SINGLE_RHS:
2433 tree rhs = gimple_assign_rhs1 (stmt);
2434 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2436 if (TREE_CODE (rhs) == SSA_NAME)
2438 /* If the RHS is an SSA_NAME, return its known constant value,
2439 if any. */
2440 return (*valueize) (rhs);
2442 /* Handle propagating invariant addresses into address
2443 operations. */
2444 else if (TREE_CODE (rhs) == ADDR_EXPR
2445 && !is_gimple_min_invariant (rhs))
2447 HOST_WIDE_INT offset = 0;
2448 tree base;
2449 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2450 &offset,
2451 valueize);
2452 if (base
2453 && (CONSTANT_CLASS_P (base)
2454 || decl_address_invariant_p (base)))
2455 return build_invariant_address (TREE_TYPE (rhs),
2456 base, offset);
2458 else if (TREE_CODE (rhs) == CONSTRUCTOR
2459 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2460 && (CONSTRUCTOR_NELTS (rhs)
2461 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2463 unsigned i;
2464 tree val, *vec;
2466 vec = XALLOCAVEC (tree,
2467 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
2468 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2470 val = (*valueize) (val);
2471 if (TREE_CODE (val) == INTEGER_CST
2472 || TREE_CODE (val) == REAL_CST
2473 || TREE_CODE (val) == FIXED_CST)
2474 vec[i] = val;
2475 else
2476 return NULL_TREE;
2479 return build_vector (TREE_TYPE (rhs), vec);
2482 if (kind == tcc_reference)
2484 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2485 || TREE_CODE (rhs) == REALPART_EXPR
2486 || TREE_CODE (rhs) == IMAGPART_EXPR)
2487 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2489 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2490 return fold_unary_loc (EXPR_LOCATION (rhs),
2491 TREE_CODE (rhs),
2492 TREE_TYPE (rhs), val);
2494 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2495 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2497 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2498 return fold_ternary_loc (EXPR_LOCATION (rhs),
2499 TREE_CODE (rhs),
2500 TREE_TYPE (rhs), val,
2501 TREE_OPERAND (rhs, 1),
2502 TREE_OPERAND (rhs, 2));
2504 else if (TREE_CODE (rhs) == MEM_REF
2505 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2507 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2508 if (TREE_CODE (val) == ADDR_EXPR
2509 && is_gimple_min_invariant (val))
2511 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2512 unshare_expr (val),
2513 TREE_OPERAND (rhs, 1));
2514 if (tem)
2515 rhs = tem;
2518 return fold_const_aggregate_ref_1 (rhs, valueize);
2520 else if (kind == tcc_declaration)
2521 return get_symbol_constant_value (rhs);
2522 return rhs;
2525 case GIMPLE_UNARY_RHS:
2527 /* Handle unary operators that can appear in GIMPLE form.
2528 Note that we know the single operand must be a constant,
2529 so this should almost always return a simplified RHS. */
2530 tree lhs = gimple_assign_lhs (stmt);
2531 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2533 /* Conversions are useless for CCP purposes if they are
2534 value-preserving. Thus the restrictions that
2535 useless_type_conversion_p places for restrict qualification
2536 of pointer types should not apply here.
2537 Substitution later will only substitute to allowed places. */
2538 if (CONVERT_EXPR_CODE_P (subcode)
2539 && POINTER_TYPE_P (TREE_TYPE (lhs))
2540 && POINTER_TYPE_P (TREE_TYPE (op0))
2541 && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2542 == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2543 && TYPE_MODE (TREE_TYPE (lhs))
2544 == TYPE_MODE (TREE_TYPE (op0)))
2545 return op0;
2547 return
2548 fold_unary_ignore_overflow_loc (loc, subcode,
2549 gimple_expr_type (stmt), op0);
2552 case GIMPLE_BINARY_RHS:
2554 /* Handle binary operators that can appear in GIMPLE form. */
2555 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2556 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2558 /* Translate &x + CST into an invariant form suitable for
2559 further propagation. */
2560 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2561 && TREE_CODE (op0) == ADDR_EXPR
2562 && TREE_CODE (op1) == INTEGER_CST)
2564 tree off = fold_convert (ptr_type_node, op1);
2565 return build_fold_addr_expr_loc
2566 (loc,
2567 fold_build2 (MEM_REF,
2568 TREE_TYPE (TREE_TYPE (op0)),
2569 unshare_expr (op0), off));
2572 return fold_binary_loc (loc, subcode,
2573 gimple_expr_type (stmt), op0, op1);
2576 case GIMPLE_TERNARY_RHS:
2578 /* Handle ternary operators that can appear in GIMPLE form. */
2579 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2580 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2581 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2583 /* Fold embedded expressions in ternary codes. */
2584 if ((subcode == COND_EXPR
2585 || subcode == VEC_COND_EXPR)
2586 && COMPARISON_CLASS_P (op0))
2588 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2589 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2590 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2591 TREE_TYPE (op0), op00, op01);
2592 if (tem)
2593 op0 = tem;
2596 return fold_ternary_loc (loc, subcode,
2597 gimple_expr_type (stmt), op0, op1, op2);
2600 default:
2601 gcc_unreachable ();
2605 case GIMPLE_CALL:
2607 tree fn;
2609 if (gimple_call_internal_p (stmt))
2610 /* No folding yet for these functions. */
2611 return NULL_TREE;
2613 fn = (*valueize) (gimple_call_fn (stmt));
2614 if (TREE_CODE (fn) == ADDR_EXPR
2615 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2616 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2618 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2619 tree call, retval;
2620 unsigned i;
2621 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2622 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2623 call = build_call_array_loc (loc,
2624 gimple_call_return_type (stmt),
2625 fn, gimple_call_num_args (stmt), args);
2626 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2627 if (retval)
2628 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2629 STRIP_NOPS (retval);
2630 return retval;
2632 return NULL_TREE;
2635 default:
2636 return NULL_TREE;
2640 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2641 Returns NULL_TREE if folding to a constant is not possible, otherwise
2642 returns a constant according to is_gimple_min_invariant. */
2644 tree
2645 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2647 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2648 if (res && is_gimple_min_invariant (res))
2649 return res;
2650 return NULL_TREE;
2654 /* The following set of functions are supposed to fold references using
2655 their constant initializers. */
2657 static tree fold_ctor_reference (tree type, tree ctor,
2658 unsigned HOST_WIDE_INT offset,
2659 unsigned HOST_WIDE_INT size, tree);
2661 /* See if we can find constructor defining value of BASE.
2662 When we know the consructor with constant offset (such as
2663 base is array[40] and we do know constructor of array), then
2664 BIT_OFFSET is adjusted accordingly.
2666 As a special case, return error_mark_node when constructor
2667 is not explicitly available, but it is known to be zero
2668 such as 'static const int a;'. */
2669 static tree
2670 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2671 tree (*valueize)(tree))
2673 HOST_WIDE_INT bit_offset2, size, max_size;
2674 if (TREE_CODE (base) == MEM_REF)
2676 if (!integer_zerop (TREE_OPERAND (base, 1)))
2678 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2679 return NULL_TREE;
2680 *bit_offset += (mem_ref_offset (base).low
2681 * BITS_PER_UNIT);
2684 if (valueize
2685 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2686 base = valueize (TREE_OPERAND (base, 0));
2687 if (!base || TREE_CODE (base) != ADDR_EXPR)
2688 return NULL_TREE;
2689 base = TREE_OPERAND (base, 0);
2692 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2693 DECL_INITIAL. If BASE is a nested reference into another
2694 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2695 the inner reference. */
2696 switch (TREE_CODE (base))
2698 case VAR_DECL:
2699 if (!const_value_known_p (base))
2700 return NULL_TREE;
2702 /* Fallthru. */
2703 case CONST_DECL:
2704 if (!DECL_INITIAL (base)
2705 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2706 return error_mark_node;
2707 /* Do not return an error_mark_node DECL_INITIAL. LTO uses this
2708 as special marker (_not_ zero ...) for its own purposes. */
2709 if (DECL_INITIAL (base) == error_mark_node)
2710 return NULL_TREE;
2711 return DECL_INITIAL (base);
2713 case ARRAY_REF:
2714 case COMPONENT_REF:
2715 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2716 if (max_size == -1 || size != max_size)
2717 return NULL_TREE;
2718 *bit_offset += bit_offset2;
2719 return get_base_constructor (base, bit_offset, valueize);
2721 case STRING_CST:
2722 case CONSTRUCTOR:
2723 return base;
2725 default:
2726 return NULL_TREE;
2730 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2731 to the memory at bit OFFSET.
2733 We do only simple job of folding byte accesses. */
2735 static tree
2736 fold_string_cst_ctor_reference (tree type, tree ctor,
2737 unsigned HOST_WIDE_INT offset,
2738 unsigned HOST_WIDE_INT size)
2740 if (INTEGRAL_TYPE_P (type)
2741 && (TYPE_MODE (type)
2742 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2743 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2744 == MODE_INT)
2745 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2746 && size == BITS_PER_UNIT
2747 && !(offset % BITS_PER_UNIT))
2749 offset /= BITS_PER_UNIT;
2750 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2751 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2752 [offset]));
2753 /* Folding
2754 const char a[20]="hello";
2755 return a[10];
2757 might lead to offset greater than string length. In this case we
2758 know value is either initialized to 0 or out of bounds. Return 0
2759 in both cases. */
2760 return build_zero_cst (type);
2762 return NULL_TREE;
2765 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2766 SIZE to the memory at bit OFFSET. */
2768 static tree
2769 fold_array_ctor_reference (tree type, tree ctor,
2770 unsigned HOST_WIDE_INT offset,
2771 unsigned HOST_WIDE_INT size,
2772 tree from_decl)
2774 unsigned HOST_WIDE_INT cnt;
2775 tree cfield, cval;
2776 double_int low_bound, elt_size;
2777 double_int index, max_index;
2778 double_int access_index;
2779 tree domain_type = NULL_TREE, index_type = NULL_TREE;
2780 HOST_WIDE_INT inner_offset;
2782 /* Compute low bound and elt size. */
2783 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2784 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2785 if (domain_type && TYPE_MIN_VALUE (domain_type))
2787 /* Static constructors for variably sized objects makes no sense. */
2788 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2789 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
2790 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2792 else
2793 low_bound = double_int_zero;
2794 /* Static constructors for variably sized objects makes no sense. */
2795 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2796 == INTEGER_CST);
2797 elt_size =
2798 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2801 /* We can handle only constantly sized accesses that are known to not
2802 be larger than size of array element. */
2803 if (!TYPE_SIZE_UNIT (type)
2804 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2805 || double_int_cmp (elt_size,
2806 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
2807 return NULL_TREE;
2809 /* Compute the array index we look for. */
2810 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
2811 elt_size, TRUNC_DIV_EXPR);
2812 access_index = double_int_add (access_index, low_bound);
2813 if (index_type)
2814 access_index = double_int_ext (access_index,
2815 TYPE_PRECISION (index_type),
2816 TYPE_UNSIGNED (index_type));
2818 /* And offset within the access. */
2819 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
2821 /* See if the array field is large enough to span whole access. We do not
2822 care to fold accesses spanning multiple array indexes. */
2823 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
2824 return NULL_TREE;
2826 index = double_int_sub (low_bound, double_int_one);
2827 if (index_type)
2828 index = double_int_ext (index,
2829 TYPE_PRECISION (index_type),
2830 TYPE_UNSIGNED (index_type));
2832 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2834 /* Array constructor might explicitely set index, or specify range
2835 or leave index NULL meaning that it is next index after previous
2836 one. */
2837 if (cfield)
2839 if (TREE_CODE (cfield) == INTEGER_CST)
2840 max_index = index = tree_to_double_int (cfield);
2841 else
2843 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2844 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2845 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2848 else
2850 index = double_int_add (index, double_int_one);
2851 if (index_type)
2852 index = double_int_ext (index,
2853 TYPE_PRECISION (index_type),
2854 TYPE_UNSIGNED (index_type));
2855 max_index = index;
2858 /* Do we have match? */
2859 if (double_int_cmp (access_index, index, 1) >= 0
2860 && double_int_cmp (access_index, max_index, 1) <= 0)
2861 return fold_ctor_reference (type, cval, inner_offset, size,
2862 from_decl);
2864 /* When memory is not explicitely mentioned in constructor,
2865 it is 0 (or out of range). */
2866 return build_zero_cst (type);
2869 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2870 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2872 static tree
2873 fold_nonarray_ctor_reference (tree type, tree ctor,
2874 unsigned HOST_WIDE_INT offset,
2875 unsigned HOST_WIDE_INT size,
2876 tree from_decl)
2878 unsigned HOST_WIDE_INT cnt;
2879 tree cfield, cval;
2881 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2882 cval)
2884 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2885 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2886 tree field_size = DECL_SIZE (cfield);
2887 double_int bitoffset;
2888 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2889 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
2890 double_int bitoffset_end, access_end;
2892 /* Variable sized objects in static constructors makes no sense,
2893 but field_size can be NULL for flexible array members. */
2894 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2895 && TREE_CODE (byte_offset) == INTEGER_CST
2896 && (field_size != NULL_TREE
2897 ? TREE_CODE (field_size) == INTEGER_CST
2898 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2900 /* Compute bit offset of the field. */
2901 bitoffset = double_int_add (tree_to_double_int (field_offset),
2902 double_int_mul (byte_offset_cst,
2903 bits_per_unit_cst));
2904 /* Compute bit offset where the field ends. */
2905 if (field_size != NULL_TREE)
2906 bitoffset_end = double_int_add (bitoffset,
2907 tree_to_double_int (field_size));
2908 else
2909 bitoffset_end = double_int_zero;
2911 access_end = double_int_add (uhwi_to_double_int (offset),
2912 uhwi_to_double_int (size));
2914 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2915 [BITOFFSET, BITOFFSET_END)? */
2916 if (double_int_cmp (access_end, bitoffset, 0) > 0
2917 && (field_size == NULL_TREE
2918 || double_int_cmp (uhwi_to_double_int (offset),
2919 bitoffset_end, 0) < 0))
2921 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
2922 bitoffset);
2923 /* We do have overlap. Now see if field is large enough to
2924 cover the access. Give up for accesses spanning multiple
2925 fields. */
2926 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
2927 return NULL_TREE;
2928 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
2929 return NULL_TREE;
2930 return fold_ctor_reference (type, cval,
2931 double_int_to_uhwi (inner_offset), size,
2932 from_decl);
2935 /* When memory is not explicitely mentioned in constructor, it is 0. */
2936 return build_zero_cst (type);
2939 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2940 to the memory at bit OFFSET. */
2942 static tree
2943 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2944 unsigned HOST_WIDE_INT size, tree from_decl)
2946 tree ret;
2948 /* We found the field with exact match. */
2949 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2950 && !offset)
2951 return canonicalize_constructor_val (ctor, from_decl);
2953 /* We are at the end of walk, see if we can view convert the
2954 result. */
2955 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2956 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2957 && operand_equal_p (TYPE_SIZE (type),
2958 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2960 ret = canonicalize_constructor_val (ctor, from_decl);
2961 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2962 if (ret)
2963 STRIP_NOPS (ret);
2964 return ret;
2966 if (TREE_CODE (ctor) == STRING_CST)
2967 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2968 if (TREE_CODE (ctor) == CONSTRUCTOR)
2971 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2972 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
2973 return fold_array_ctor_reference (type, ctor, offset, size,
2974 from_decl);
2975 else
2976 return fold_nonarray_ctor_reference (type, ctor, offset, size,
2977 from_decl);
2980 return NULL_TREE;
2983 /* Return the tree representing the element referenced by T if T is an
2984 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
2985 names using VALUEIZE. Return NULL_TREE otherwise. */
2987 tree
2988 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
2990 tree ctor, idx, base;
2991 HOST_WIDE_INT offset, size, max_size;
2992 tree tem;
2994 if (TREE_THIS_VOLATILE (t))
2995 return NULL_TREE;
2997 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
2998 return get_symbol_constant_value (t);
3000 tem = fold_read_from_constant_string (t);
3001 if (tem)
3002 return tem;
3004 switch (TREE_CODE (t))
3006 case ARRAY_REF:
3007 case ARRAY_RANGE_REF:
3008 /* Constant indexes are handled well by get_base_constructor.
3009 Only special case variable offsets.
3010 FIXME: This code can't handle nested references with variable indexes
3011 (they will be handled only by iteration of ccp). Perhaps we can bring
3012 get_ref_base_and_extent here and make it use a valueize callback. */
3013 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3014 && valueize
3015 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3016 && TREE_CODE (idx) == INTEGER_CST)
3018 tree low_bound, unit_size;
3019 double_int doffset;
3021 /* If the resulting bit-offset is constant, track it. */
3022 if ((low_bound = array_ref_low_bound (t),
3023 TREE_CODE (low_bound) == INTEGER_CST)
3024 && (unit_size = array_ref_element_size (t),
3025 host_integerp (unit_size, 1))
3026 && (doffset = double_int_sext
3027 (double_int_sub (TREE_INT_CST (idx),
3028 TREE_INT_CST (low_bound)),
3029 TYPE_PRECISION (TREE_TYPE (idx))),
3030 double_int_fits_in_shwi_p (doffset)))
3032 offset = double_int_to_shwi (doffset);
3033 offset *= TREE_INT_CST_LOW (unit_size);
3034 offset *= BITS_PER_UNIT;
3036 base = TREE_OPERAND (t, 0);
3037 ctor = get_base_constructor (base, &offset, valueize);
3038 /* Empty constructor. Always fold to 0. */
3039 if (ctor == error_mark_node)
3040 return build_zero_cst (TREE_TYPE (t));
3041 /* Out of bound array access. Value is undefined,
3042 but don't fold. */
3043 if (offset < 0)
3044 return NULL_TREE;
3045 /* We can not determine ctor. */
3046 if (!ctor)
3047 return NULL_TREE;
3048 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3049 TREE_INT_CST_LOW (unit_size)
3050 * BITS_PER_UNIT,
3051 base);
3054 /* Fallthru. */
3056 case COMPONENT_REF:
3057 case BIT_FIELD_REF:
3058 case TARGET_MEM_REF:
3059 case MEM_REF:
3060 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3061 ctor = get_base_constructor (base, &offset, valueize);
3063 /* Empty constructor. Always fold to 0. */
3064 if (ctor == error_mark_node)
3065 return build_zero_cst (TREE_TYPE (t));
3066 /* We do not know precise address. */
3067 if (max_size == -1 || max_size != size)
3068 return NULL_TREE;
3069 /* We can not determine ctor. */
3070 if (!ctor)
3071 return NULL_TREE;
3073 /* Out of bound array access. Value is undefined, but don't fold. */
3074 if (offset < 0)
3075 return NULL_TREE;
3077 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
3078 base);
3080 case REALPART_EXPR:
3081 case IMAGPART_EXPR:
3083 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3084 if (c && TREE_CODE (c) == COMPLEX_CST)
3085 return fold_build1_loc (EXPR_LOCATION (t),
3086 TREE_CODE (t), TREE_TYPE (t), c);
3087 break;
3090 default:
3091 break;
3094 return NULL_TREE;
3097 tree
3098 fold_const_aggregate_ref (tree t)
3100 return fold_const_aggregate_ref_1 (t, NULL);
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  Returns NULL_TREE when the method cannot be
   determined or cannot be referenced from the current unit.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
{
  /* OFFSET and SIZE are in bits (note the BITS_PER_UNIT scaling and the
     use of TYPE_SIZE below), matching what fold_ctor_reference expects.  */
  unsigned HOST_WIDE_INT offset, size;
  tree v, fn, vtable;

  vtable = v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  /* BINFO_VTABLE may be &vtable or &vtable + byte-offset; peel off the
     constant byte offset (converted to bits) if present.  */
  if (TREE_CODE (v) == POINTER_PLUS_EXPR)
    {
      offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
      v = TREE_OPERAND (v, 0);
    }
  else
    offset = 0;

  if (TREE_CODE (v) != ADDR_EXPR)
    return NULL_TREE;
  v = TREE_OPERAND (v, 0);

  /* We need an actual vtable variable with a usable initializer to
     look the slot up in.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v)
      || !DECL_INITIAL (v)
      || DECL_INITIAL (v) == error_mark_node)
    return NULL_TREE;
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  /* SIZE is the bit-size of one vtable slot (the array element type);
     TOKEN indexes slots, so the final bit offset is OFFSET + TOKEN * SIZE.  */
  size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
  offset += token * size;
  fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
			    offset, size, vtable);
  if (!fn || integer_zerop (fn))
    return NULL_TREE;
  /* A vtable slot folds either to &function or, on function-descriptor
     targets, to an FDESC_EXPR wrapping the function.  */
  gcc_assert (TREE_CODE (fn) == ADDR_EXPR
	      || TREE_CODE (fn) == FDESC_EXPR);
  fn = TREE_OPERAND (fn, 0);
  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);

  /* When cgraph node is missing and function is not public, we cannot
     devirtualize.  This can happen in WHOPR when the actual method
     ends up in other partition, because we found devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (fn, vtable))
    return NULL_TREE;

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_get_create_node (fn);

  return fn;
}
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  A FALSE return
   means "not provably non-negative", not "negative".  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
	 cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
	{
	case ABS_EXPR:
	  /* Always true for floating-point operands.  */
	  return true;

	case MULT_EXPR:
	  /* True if the two operands are identical (since we are
	     restricted to floating-point inputs).  */
	  op0 = gimple_assign_rhs1 (def_stmt);
	  op1 = gimple_assign_rhs2 (def_stmt);

	  if (op0 == op1
	      || operand_equal_p (op0, op1, 0))
	    return true;

	  /* Fall through: a product of distinct operands proves nothing.  */
	default:
	  return false;
	}
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  tree arg1;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    /* Built-ins whose result is non-negative for every valid
	       input (range of the math function, absolute values,
	       exponentials, distances).  */
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
		 nonnegative inputs.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the second argument is an even integer.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == INTEGER_CST
		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the second argument is an even integer-valued
		 real.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (arg1);
		  n = real_to_integer (&c);

		  /* Check evenness on the integer image, then verify the
		     REAL_CST was exactly that integer (i.e. round-trips).  */
		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}

	      break;

	    default:
	      return false;
	    }
	}
    }

  /* Could not prove non-negativity from the defining statement.  */
  return false;
}