[AArch64] Add special case when expanding vcond with arms {-1, -1}, {0, 0}.
[official-gcc.git] / gcc / gimple-fold.c
blob738d7fddd4c97bc78eca043ff47bb60df9a9efdc
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2013 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "dumpfile.h"
29 #include "tree-flow.h"
30 #include "tree-ssa-propagate.h"
31 #include "target.h"
32 #include "gimple-fold.h"
34 /* Return true when DECL can be referenced from current unit.
35 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
36 We can get declarations that are not possible to reference for various
37 reasons:
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
41 when they are keyed to other compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
44 set.
45 2) In WHOPR mode devirtualization might lead to reference
46 to method that was partitioned elsehwere.
47 In this case we have static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
49 declaring the body.
50 3) COMDAT functions referred by external vtables that
51 we devirtualize only during final copmilation stage.
52 At this time we already decided that we will not output
53 the function body and thus we can't reference the symbol
54 directly. */
56 static bool
57 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
59 struct varpool_node *vnode;
60 struct cgraph_node *node;
61 symtab_node snode;
63 /* We will later output the initializer, so we can refer to it.
64 So we are concerned only when DECL comes from initializer of
65 external var. */
66 if (!from_decl
67 || TREE_CODE (from_decl) != VAR_DECL
68 || !DECL_EXTERNAL (from_decl)
69 || (flag_ltrans
70 && symtab_get_node (from_decl)->symbol.in_other_partition))
71 return true;
72 /* We are concerned only about static/external vars and functions. */
73 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
74 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
75 return true;
76 /* Weakrefs have somewhat confusing DECL_EXTERNAL flag set; they
77 are always safe. */
78 if (DECL_EXTERNAL (decl)
79 && lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
80 return true;
81 /* We are folding reference from external vtable. The vtable may reffer
82 to a symbol keyed to other compilation unit. The other compilation
83 unit may be in separate DSO and the symbol may be hidden. */
84 if (DECL_VISIBILITY_SPECIFIED (decl)
85 && DECL_EXTERNAL (decl)
86 && (!(snode = symtab_get_node (decl)) || !snode->symbol.in_other_partition))
87 return false;
88 /* When function is public, we always can introduce new reference.
89 Exception are the COMDAT functions where introducing a direct
90 reference imply need to include function body in the curren tunit. */
91 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
92 return true;
93 /* We are not at ltrans stage; so don't worry about WHOPR.
94 Also when still gimplifying all referred comdat functions will be
95 produced.
97 As observed in PR20991 for already optimized out comdat virtual functions
98 it may be tempting to not necessarily give up because the copy will be
99 output elsewhere when corresponding vtable is output.
100 This is however not possible - ABI specify that COMDATs are output in
101 units where they are used and when the other unit was compiled with LTO
102 it is possible that vtable was kept public while the function itself
103 was privatized. */
104 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
105 return true;
107 /* OK we are seeing either COMDAT or static variable. In this case we must
108 check that the definition is still around so we can refer it. */
109 if (TREE_CODE (decl) == FUNCTION_DECL)
111 node = cgraph_get_node (decl);
112 /* Check that we still have function body and that we didn't took
113 the decision to eliminate offline copy of the function yet.
114 The second is important when devirtualization happens during final
115 compilation stage when making a new reference no longer makes callee
116 to be compiled. */
117 if (!node || !node->analyzed || node->global.inlined_to)
119 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
120 return false;
123 else if (TREE_CODE (decl) == VAR_DECL)
125 vnode = varpool_get_node (decl);
126 if (!vnode || !vnode->analyzed)
128 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
129 return false;
132 return true;
135 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
136 acceptable form for is_gimple_min_invariant.
137 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
139 tree
140 canonicalize_constructor_val (tree cval, tree from_decl)
142 tree orig_cval = cval;
143 STRIP_NOPS (cval);
144 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
145 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
147 tree ptr = TREE_OPERAND (cval, 0);
148 if (is_gimple_min_invariant (ptr))
149 cval = build1_loc (EXPR_LOCATION (cval),
150 ADDR_EXPR, TREE_TYPE (ptr),
151 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
152 ptr,
153 fold_convert (ptr_type_node,
154 TREE_OPERAND (cval, 1))));
156 if (TREE_CODE (cval) == ADDR_EXPR)
158 tree base = NULL_TREE;
159 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
161 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
162 if (base)
163 TREE_OPERAND (cval, 0) = base;
165 else
166 base = get_base_address (TREE_OPERAND (cval, 0));
167 if (!base)
168 return NULL_TREE;
170 if ((TREE_CODE (base) == VAR_DECL
171 || TREE_CODE (base) == FUNCTION_DECL)
172 && !can_refer_decl_in_current_unit_p (base, from_decl))
173 return NULL_TREE;
174 if (TREE_CODE (base) == VAR_DECL)
175 TREE_ADDRESSABLE (base) = 1;
176 else if (TREE_CODE (base) == FUNCTION_DECL)
178 /* Make sure we create a cgraph node for functions we'll reference.
179 They can be non-existent if the reference comes from an entry
180 of an external vtable for example. */
181 cgraph_get_create_node (base);
183 /* Fixup types in global initializers. */
184 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
185 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
187 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
188 cval = fold_convert (TREE_TYPE (orig_cval), cval);
189 return cval;
191 return orig_cval;
194 /* If SYM is a constant variable with known value, return the value.
195 NULL_TREE is returned otherwise. */
197 tree
198 get_symbol_constant_value (tree sym)
200 if (const_value_known_p (sym))
202 tree val = DECL_INITIAL (sym);
203 if (val)
205 val = canonicalize_constructor_val (unshare_expr (val), sym);
206 if (val && is_gimple_min_invariant (val))
207 return val;
208 else
209 return NULL_TREE;
211 /* Variables declared 'const' without an initializer
212 have zero as the initializer if they may not be
213 overridden at link or run time. */
214 if (!val
215 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
216 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
217 return build_zero_cst (TREE_TYPE (sym));
220 return NULL_TREE;
225 /* Subroutine of fold_stmt. We perform several simplifications of the
226 memory reference tree EXPR and make sure to re-gimplify them properly
227 after propagation of constant addresses. IS_LHS is true if the
228 reference is supposed to be an lvalue. */
230 static tree
231 maybe_fold_reference (tree expr, bool is_lhs)
233 tree *t = &expr;
234 tree result;
236 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
237 || TREE_CODE (expr) == REALPART_EXPR
238 || TREE_CODE (expr) == IMAGPART_EXPR)
239 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
240 return fold_unary_loc (EXPR_LOCATION (expr),
241 TREE_CODE (expr),
242 TREE_TYPE (expr),
243 TREE_OPERAND (expr, 0));
244 else if (TREE_CODE (expr) == BIT_FIELD_REF
245 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
246 return fold_ternary_loc (EXPR_LOCATION (expr),
247 TREE_CODE (expr),
248 TREE_TYPE (expr),
249 TREE_OPERAND (expr, 0),
250 TREE_OPERAND (expr, 1),
251 TREE_OPERAND (expr, 2));
253 while (handled_component_p (*t))
254 t = &TREE_OPERAND (*t, 0);
256 /* Canonicalize MEM_REFs invariant address operand. Do this first
257 to avoid feeding non-canonical MEM_REFs elsewhere. */
258 if (TREE_CODE (*t) == MEM_REF
259 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
261 bool volatile_p = TREE_THIS_VOLATILE (*t);
262 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
263 TREE_OPERAND (*t, 0),
264 TREE_OPERAND (*t, 1));
265 if (tem)
267 TREE_THIS_VOLATILE (tem) = volatile_p;
268 *t = tem;
269 tem = maybe_fold_reference (expr, is_lhs);
270 if (tem)
271 return tem;
272 return expr;
276 if (!is_lhs
277 && (result = fold_const_aggregate_ref (expr))
278 && is_gimple_min_invariant (result))
279 return result;
281 /* Fold back MEM_REFs to reference trees. */
282 if (TREE_CODE (*t) == MEM_REF
283 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
284 && integer_zerop (TREE_OPERAND (*t, 1))
285 && (TREE_THIS_VOLATILE (*t)
286 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
287 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
288 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
289 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
290 /* We have to look out here to not drop a required conversion
291 from the rhs to the lhs if is_lhs, but we don't have the
292 rhs here to verify that. Thus require strict type
293 compatibility. */
294 && types_compatible_p (TREE_TYPE (*t),
295 TREE_TYPE (TREE_OPERAND
296 (TREE_OPERAND (*t, 0), 0))))
298 tree tem;
299 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
300 tem = maybe_fold_reference (expr, is_lhs);
301 if (tem)
302 return tem;
303 return expr;
305 else if (TREE_CODE (*t) == TARGET_MEM_REF)
307 tree tem = maybe_fold_tmr (*t);
308 if (tem)
310 *t = tem;
311 tem = maybe_fold_reference (expr, is_lhs);
312 if (tem)
313 return tem;
314 return expr;
318 return NULL_TREE;
322 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
323 replacement rhs for the statement or NULL_TREE if no simplification
324 could be made. It is assumed that the operands have been previously
325 folded. */
327 static tree
328 fold_gimple_assign (gimple_stmt_iterator *si)
330 gimple stmt = gsi_stmt (*si);
331 enum tree_code subcode = gimple_assign_rhs_code (stmt);
332 location_t loc = gimple_location (stmt);
334 tree result = NULL_TREE;
336 switch (get_gimple_rhs_class (subcode))
338 case GIMPLE_SINGLE_RHS:
340 tree rhs = gimple_assign_rhs1 (stmt);
342 if (REFERENCE_CLASS_P (rhs))
343 return maybe_fold_reference (rhs, false);
345 else if (TREE_CODE (rhs) == ADDR_EXPR)
347 tree ref = TREE_OPERAND (rhs, 0);
348 tree tem = maybe_fold_reference (ref, true);
349 if (tem
350 && TREE_CODE (tem) == MEM_REF
351 && integer_zerop (TREE_OPERAND (tem, 1)))
352 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
353 else if (tem)
354 result = fold_convert (TREE_TYPE (rhs),
355 build_fold_addr_expr_loc (loc, tem));
356 else if (TREE_CODE (ref) == MEM_REF
357 && integer_zerop (TREE_OPERAND (ref, 1)))
358 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
361 else if (TREE_CODE (rhs) == CONSTRUCTOR
362 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
363 && (CONSTRUCTOR_NELTS (rhs)
364 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
366 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
367 unsigned i;
368 tree val;
370 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
371 if (TREE_CODE (val) != INTEGER_CST
372 && TREE_CODE (val) != REAL_CST
373 && TREE_CODE (val) != FIXED_CST)
374 return NULL_TREE;
376 return build_vector_from_ctor (TREE_TYPE (rhs),
377 CONSTRUCTOR_ELTS (rhs));
380 else if (DECL_P (rhs))
381 return get_symbol_constant_value (rhs);
383 /* If we couldn't fold the RHS, hand over to the generic
384 fold routines. */
385 if (result == NULL_TREE)
386 result = fold (rhs);
388 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
389 that may have been added by fold, and "useless" type
390 conversions that might now be apparent due to propagation. */
391 STRIP_USELESS_TYPE_CONVERSION (result);
393 if (result != rhs && valid_gimple_rhs_p (result))
394 return result;
396 return NULL_TREE;
398 break;
400 case GIMPLE_UNARY_RHS:
402 tree rhs = gimple_assign_rhs1 (stmt);
404 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
405 if (result)
407 /* If the operation was a conversion do _not_ mark a
408 resulting constant with TREE_OVERFLOW if the original
409 constant was not. These conversions have implementation
410 defined behavior and retaining the TREE_OVERFLOW flag
411 here would confuse later passes such as VRP. */
412 if (CONVERT_EXPR_CODE_P (subcode)
413 && TREE_CODE (result) == INTEGER_CST
414 && TREE_CODE (rhs) == INTEGER_CST)
415 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
417 STRIP_USELESS_TYPE_CONVERSION (result);
418 if (valid_gimple_rhs_p (result))
419 return result;
422 break;
424 case GIMPLE_BINARY_RHS:
425 /* Try to canonicalize for boolean-typed X the comparisons
426 X == 0, X == 1, X != 0, and X != 1. */
427 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
428 || gimple_assign_rhs_code (stmt) == NE_EXPR)
430 tree lhs = gimple_assign_lhs (stmt);
431 tree op1 = gimple_assign_rhs1 (stmt);
432 tree op2 = gimple_assign_rhs2 (stmt);
433 tree type = TREE_TYPE (op1);
435 /* Check whether the comparison operands are of the same boolean
436 type as the result type is.
437 Check that second operand is an integer-constant with value
438 one or zero. */
439 if (TREE_CODE (op2) == INTEGER_CST
440 && (integer_zerop (op2) || integer_onep (op2))
441 && useless_type_conversion_p (TREE_TYPE (lhs), type))
443 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
444 bool is_logical_not = false;
446 /* X == 0 and X != 1 is a logical-not.of X
447 X == 1 and X != 0 is X */
448 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
449 || (cmp_code == NE_EXPR && integer_onep (op2)))
450 is_logical_not = true;
452 if (is_logical_not == false)
453 result = op1;
454 /* Only for one-bit precision typed X the transformation
455 !X -> ~X is valied. */
456 else if (TYPE_PRECISION (type) == 1)
457 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
458 type, op1);
459 /* Otherwise we use !X -> X ^ 1. */
460 else
461 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
462 type, op1, build_int_cst (type, 1));
467 if (!result)
468 result = fold_binary_loc (loc, subcode,
469 TREE_TYPE (gimple_assign_lhs (stmt)),
470 gimple_assign_rhs1 (stmt),
471 gimple_assign_rhs2 (stmt));
473 if (result)
475 STRIP_USELESS_TYPE_CONVERSION (result);
476 if (valid_gimple_rhs_p (result))
477 return result;
479 break;
481 case GIMPLE_TERNARY_RHS:
482 /* Try to fold a conditional expression. */
483 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
485 tree op0 = gimple_assign_rhs1 (stmt);
486 tree tem;
487 bool set = false;
488 location_t cond_loc = gimple_location (stmt);
490 if (COMPARISON_CLASS_P (op0))
492 fold_defer_overflow_warnings ();
493 tem = fold_binary_loc (cond_loc,
494 TREE_CODE (op0), TREE_TYPE (op0),
495 TREE_OPERAND (op0, 0),
496 TREE_OPERAND (op0, 1));
497 /* This is actually a conditional expression, not a GIMPLE
498 conditional statement, however, the valid_gimple_rhs_p
499 test still applies. */
500 set = (tem && is_gimple_condexpr (tem)
501 && valid_gimple_rhs_p (tem));
502 fold_undefer_overflow_warnings (set, stmt, 0);
504 else if (is_gimple_min_invariant (op0))
506 tem = op0;
507 set = true;
509 else
510 return NULL_TREE;
512 if (set)
513 result = fold_build3_loc (cond_loc, COND_EXPR,
514 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
515 gimple_assign_rhs2 (stmt),
516 gimple_assign_rhs3 (stmt));
519 if (!result)
520 result = fold_ternary_loc (loc, subcode,
521 TREE_TYPE (gimple_assign_lhs (stmt)),
522 gimple_assign_rhs1 (stmt),
523 gimple_assign_rhs2 (stmt),
524 gimple_assign_rhs3 (stmt));
526 if (result)
528 STRIP_USELESS_TYPE_CONVERSION (result);
529 if (valid_gimple_rhs_p (result))
530 return result;
532 break;
534 case GIMPLE_INVALID_RHS:
535 gcc_unreachable ();
538 return NULL_TREE;
541 /* Attempt to fold a conditional statement. Return true if any changes were
542 made. We only attempt to fold the condition expression, and do not perform
543 any transformation that would require alteration of the cfg. It is
544 assumed that the operands have been previously folded. */
546 static bool
547 fold_gimple_cond (gimple stmt)
549 tree result = fold_binary_loc (gimple_location (stmt),
550 gimple_cond_code (stmt),
551 boolean_type_node,
552 gimple_cond_lhs (stmt),
553 gimple_cond_rhs (stmt));
555 if (result)
557 STRIP_USELESS_TYPE_CONVERSION (result);
558 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
560 gimple_cond_set_condition_from_tree (stmt, result);
561 return true;
565 return false;
568 /* Convert EXPR into a GIMPLE value suitable for substitution on the
569 RHS of an assignment. Insert the necessary statements before
570 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
571 is replaced. If the call is expected to produces a result, then it
572 is replaced by an assignment of the new RHS to the result variable.
573 If the result is to be ignored, then the call is replaced by a
574 GIMPLE_NOP. A proper VDEF chain is retained by making the first
575 VUSE and the last VDEF of the whole sequence be the same as the replaced
576 statement and using new SSA names for stores in between. */
578 void
579 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
581 tree lhs;
582 gimple stmt, new_stmt;
583 gimple_stmt_iterator i;
584 gimple_seq stmts = NULL;
585 struct gimplify_ctx gctx;
586 gimple laststore;
587 tree reaching_vuse;
589 stmt = gsi_stmt (*si_p);
591 gcc_assert (is_gimple_call (stmt));
593 push_gimplify_context (&gctx);
594 gctx.into_ssa = gimple_in_ssa_p (cfun);
596 lhs = gimple_call_lhs (stmt);
597 if (lhs == NULL_TREE)
599 gimplify_and_add (expr, &stmts);
600 /* We can end up with folding a memcpy of an empty class assignment
601 which gets optimized away by C++ gimplification. */
602 if (gimple_seq_empty_p (stmts))
604 pop_gimplify_context (NULL);
605 if (gimple_in_ssa_p (cfun))
607 unlink_stmt_vdef (stmt);
608 release_defs (stmt);
610 gsi_replace (si_p, gimple_build_nop (), true);
611 return;
614 else
616 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
617 new_stmt = gimple_build_assign (lhs, tmp);
618 i = gsi_last (stmts);
619 gsi_insert_after_without_update (&i, new_stmt,
620 GSI_CONTINUE_LINKING);
623 pop_gimplify_context (NULL);
625 if (gimple_has_location (stmt))
626 annotate_all_with_location (stmts, gimple_location (stmt));
628 /* First iterate over the replacement statements backward, assigning
629 virtual operands to their defining statements. */
630 laststore = NULL;
631 for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
633 new_stmt = gsi_stmt (i);
634 if ((gimple_assign_single_p (new_stmt)
635 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
636 || (is_gimple_call (new_stmt)
637 && (gimple_call_flags (new_stmt)
638 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
640 tree vdef;
641 if (!laststore)
642 vdef = gimple_vdef (stmt);
643 else
644 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
645 gimple_set_vdef (new_stmt, vdef);
646 if (vdef && TREE_CODE (vdef) == SSA_NAME)
647 SSA_NAME_DEF_STMT (vdef) = new_stmt;
648 laststore = new_stmt;
652 /* Second iterate over the statements forward, assigning virtual
653 operands to their uses. */
654 reaching_vuse = gimple_vuse (stmt);
655 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
657 new_stmt = gsi_stmt (i);
658 /* If the new statement possibly has a VUSE, update it with exact SSA
659 name we know will reach this one. */
660 if (gimple_has_mem_ops (new_stmt))
661 gimple_set_vuse (new_stmt, reaching_vuse);
662 gimple_set_modified (new_stmt, true);
663 if (gimple_vdef (new_stmt))
664 reaching_vuse = gimple_vdef (new_stmt);
667 /* If the new sequence does not do a store release the virtual
668 definition of the original statement. */
669 if (reaching_vuse
670 && reaching_vuse == gimple_vuse (stmt))
672 tree vdef = gimple_vdef (stmt);
673 if (vdef
674 && TREE_CODE (vdef) == SSA_NAME)
676 unlink_stmt_vdef (stmt);
677 release_ssa_name (vdef);
681 /* Finally replace the original statement with the sequence. */
682 gsi_replace_with_seq (si_p, stmts, false);
685 /* Return the string length, maximum string length or maximum value of
686 ARG in LENGTH.
687 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
688 is not NULL and, for TYPE == 0, its value is not equal to the length
689 we determine or if we are unable to determine the length or value,
690 return false. VISITED is a bitmap of visited variables.
691 TYPE is 0 if string length should be returned, 1 for maximum string
692 length and 2 for maximum value ARG can have. */
694 static bool
695 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
697 tree var, val;
698 gimple def_stmt;
700 if (TREE_CODE (arg) != SSA_NAME)
702 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
703 if (TREE_CODE (arg) == ADDR_EXPR
704 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
705 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
707 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
708 if (TREE_CODE (aop0) == INDIRECT_REF
709 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
710 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
711 length, visited, type);
714 if (type == 2)
716 val = arg;
717 if (TREE_CODE (val) != INTEGER_CST
718 || tree_int_cst_sgn (val) < 0)
719 return false;
721 else
722 val = c_strlen (arg, 1);
723 if (!val)
724 return false;
726 if (*length)
728 if (type > 0)
730 if (TREE_CODE (*length) != INTEGER_CST
731 || TREE_CODE (val) != INTEGER_CST)
732 return false;
734 if (tree_int_cst_lt (*length, val))
735 *length = val;
736 return true;
738 else if (simple_cst_equal (val, *length) != 1)
739 return false;
742 *length = val;
743 return true;
746 /* If ARG is registered for SSA update we cannot look at its defining
747 statement. */
748 if (name_registered_for_update_p (arg))
749 return false;
751 /* If we were already here, break the infinite cycle. */
752 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
753 return true;
755 var = arg;
756 def_stmt = SSA_NAME_DEF_STMT (var);
758 switch (gimple_code (def_stmt))
760 case GIMPLE_ASSIGN:
761 /* The RHS of the statement defining VAR must either have a
762 constant length or come from another SSA_NAME with a constant
763 length. */
764 if (gimple_assign_single_p (def_stmt)
765 || gimple_assign_unary_nop_p (def_stmt))
767 tree rhs = gimple_assign_rhs1 (def_stmt);
768 return get_maxval_strlen (rhs, length, visited, type);
770 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
772 tree op2 = gimple_assign_rhs2 (def_stmt);
773 tree op3 = gimple_assign_rhs3 (def_stmt);
774 return get_maxval_strlen (op2, length, visited, type)
775 && get_maxval_strlen (op3, length, visited, type);
777 return false;
779 case GIMPLE_PHI:
781 /* All the arguments of the PHI node must have the same constant
782 length. */
783 unsigned i;
785 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
787 tree arg = gimple_phi_arg (def_stmt, i)->def;
789 /* If this PHI has itself as an argument, we cannot
790 determine the string length of this argument. However,
791 if we can find a constant string length for the other
792 PHI args then we can still be sure that this is a
793 constant string length. So be optimistic and just
794 continue with the next argument. */
795 if (arg == gimple_phi_result (def_stmt))
796 continue;
798 if (!get_maxval_strlen (arg, length, visited, type))
799 return false;
802 return true;
804 default:
805 return false;
810 /* Fold builtin call in statement STMT. Returns a simplified tree.
811 We may return a non-constant expression, including another call
812 to a different function and with different arguments, e.g.,
813 substituting memcpy for strcpy when the string length is known.
814 Note that some builtins expand into inline code that may not
815 be valid in GIMPLE. Callers must take care. */
817 tree
818 gimple_fold_builtin (gimple stmt)
820 tree result, val[3];
821 tree callee, a;
822 int arg_idx, type;
823 bitmap visited;
824 bool ignore;
825 int nargs;
826 location_t loc = gimple_location (stmt);
828 gcc_assert (is_gimple_call (stmt));
830 ignore = (gimple_call_lhs (stmt) == NULL);
832 /* First try the generic builtin folder. If that succeeds, return the
833 result directly. */
834 result = fold_call_stmt (stmt, ignore);
835 if (result)
837 if (ignore)
838 STRIP_NOPS (result);
839 return result;
842 /* Ignore MD builtins. */
843 callee = gimple_call_fndecl (stmt);
844 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
845 return NULL_TREE;
847 /* Give up for always_inline inline builtins until they are
848 inlined. */
849 if (avoid_folding_inline_builtin (callee))
850 return NULL_TREE;
852 /* If the builtin could not be folded, and it has no argument list,
853 we're done. */
854 nargs = gimple_call_num_args (stmt);
855 if (nargs == 0)
856 return NULL_TREE;
858 /* Limit the work only for builtins we know how to simplify. */
859 switch (DECL_FUNCTION_CODE (callee))
861 case BUILT_IN_STRLEN:
862 case BUILT_IN_FPUTS:
863 case BUILT_IN_FPUTS_UNLOCKED:
864 arg_idx = 0;
865 type = 0;
866 break;
867 case BUILT_IN_STRCPY:
868 case BUILT_IN_STRNCPY:
869 arg_idx = 1;
870 type = 0;
871 break;
872 case BUILT_IN_MEMCPY_CHK:
873 case BUILT_IN_MEMPCPY_CHK:
874 case BUILT_IN_MEMMOVE_CHK:
875 case BUILT_IN_MEMSET_CHK:
876 case BUILT_IN_STRNCPY_CHK:
877 case BUILT_IN_STPNCPY_CHK:
878 arg_idx = 2;
879 type = 2;
880 break;
881 case BUILT_IN_STRCPY_CHK:
882 case BUILT_IN_STPCPY_CHK:
883 arg_idx = 1;
884 type = 1;
885 break;
886 case BUILT_IN_SNPRINTF_CHK:
887 case BUILT_IN_VSNPRINTF_CHK:
888 arg_idx = 1;
889 type = 2;
890 break;
891 default:
892 return NULL_TREE;
895 if (arg_idx >= nargs)
896 return NULL_TREE;
898 /* Try to use the dataflow information gathered by the CCP process. */
899 visited = BITMAP_ALLOC (NULL);
900 bitmap_clear (visited);
902 memset (val, 0, sizeof (val));
903 a = gimple_call_arg (stmt, arg_idx);
904 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
905 val[arg_idx] = NULL_TREE;
907 BITMAP_FREE (visited);
909 result = NULL_TREE;
910 switch (DECL_FUNCTION_CODE (callee))
912 case BUILT_IN_STRLEN:
913 if (val[0] && nargs == 1)
915 tree new_val =
916 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
918 /* If the result is not a valid gimple value, or not a cast
919 of a valid gimple value, then we cannot use the result. */
920 if (is_gimple_val (new_val)
921 || (CONVERT_EXPR_P (new_val)
922 && is_gimple_val (TREE_OPERAND (new_val, 0))))
923 return new_val;
925 break;
927 case BUILT_IN_STRCPY:
928 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
929 result = fold_builtin_strcpy (loc, callee,
930 gimple_call_arg (stmt, 0),
931 gimple_call_arg (stmt, 1),
932 val[1]);
933 break;
935 case BUILT_IN_STRNCPY:
936 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
937 result = fold_builtin_strncpy (loc, callee,
938 gimple_call_arg (stmt, 0),
939 gimple_call_arg (stmt, 1),
940 gimple_call_arg (stmt, 2),
941 val[1]);
942 break;
944 case BUILT_IN_FPUTS:
945 if (nargs == 2)
946 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
947 gimple_call_arg (stmt, 1),
948 ignore, false, val[0]);
949 break;
951 case BUILT_IN_FPUTS_UNLOCKED:
952 if (nargs == 2)
953 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
954 gimple_call_arg (stmt, 1),
955 ignore, true, val[0]);
956 break;
958 case BUILT_IN_MEMCPY_CHK:
959 case BUILT_IN_MEMPCPY_CHK:
960 case BUILT_IN_MEMMOVE_CHK:
961 case BUILT_IN_MEMSET_CHK:
962 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
963 result = fold_builtin_memory_chk (loc, callee,
964 gimple_call_arg (stmt, 0),
965 gimple_call_arg (stmt, 1),
966 gimple_call_arg (stmt, 2),
967 gimple_call_arg (stmt, 3),
968 val[2], ignore,
969 DECL_FUNCTION_CODE (callee));
970 break;
972 case BUILT_IN_STRCPY_CHK:
973 case BUILT_IN_STPCPY_CHK:
974 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
975 result = fold_builtin_stxcpy_chk (loc, callee,
976 gimple_call_arg (stmt, 0),
977 gimple_call_arg (stmt, 1),
978 gimple_call_arg (stmt, 2),
979 val[1], ignore,
980 DECL_FUNCTION_CODE (callee));
981 break;
983 case BUILT_IN_STRNCPY_CHK:
984 case BUILT_IN_STPNCPY_CHK:
985 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
986 result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
987 gimple_call_arg (stmt, 1),
988 gimple_call_arg (stmt, 2),
989 gimple_call_arg (stmt, 3),
990 val[2], ignore,
991 DECL_FUNCTION_CODE (callee));
992 break;
994 case BUILT_IN_SNPRINTF_CHK:
995 case BUILT_IN_VSNPRINTF_CHK:
996 if (val[1] && is_gimple_val (val[1]))
997 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
998 DECL_FUNCTION_CODE (callee));
999 break;
1001 default:
1002 gcc_unreachable ();
1005 if (result && ignore)
1006 result = fold_ignored_result (result);
1007 return result;
1011 /* Return a binfo to be used for devirtualization of calls based on an object
1012 represented by a declaration (i.e. a global or automatically allocated one)
1013 or NULL if it cannot be found or is not safe. CST is expected to be an
1014 ADDR_EXPR of such object or the function will return NULL. Currently it is
1015 safe to use such binfo only if it has no base binfo (i.e. no ancestors). */
1017 tree
1018 gimple_extract_devirt_binfo_from_cst (tree cst)
1020 HOST_WIDE_INT offset, size, max_size;
1021 tree base, type, expected_type, binfo;
1022 bool last_artificial = false;
1024 if (!flag_devirtualize
1025 || TREE_CODE (cst) != ADDR_EXPR
1026 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
1027 return NULL_TREE;
1029 cst = TREE_OPERAND (cst, 0);
1030 expected_type = TREE_TYPE (cst);
1031 base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
1032 type = TREE_TYPE (base);
1033 if (!DECL_P (base)
1034 || max_size == -1
1035 || max_size != size
1036 || TREE_CODE (type) != RECORD_TYPE)
1037 return NULL_TREE;
1039 /* Find the sub-object the constant actually refers to and mark whether it is
1040 an artificial one (as opposed to a user-defined one). */
1041 while (true)
1043 HOST_WIDE_INT pos, size;
1044 tree fld;
1046 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
1047 break;
1048 if (offset < 0)
1049 return NULL_TREE;
1051 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1053 if (TREE_CODE (fld) != FIELD_DECL)
1054 continue;
1056 pos = int_bit_position (fld);
1057 size = tree_low_cst (DECL_SIZE (fld), 1);
1058 if (pos <= offset && (pos + size) > offset)
1059 break;
1061 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
1062 return NULL_TREE;
1064 last_artificial = DECL_ARTIFICIAL (fld);
1065 type = TREE_TYPE (fld);
1066 offset -= pos;
1068 /* Artificial sub-objects are ancestors, we do not want to use them for
1069 devirtualization, at least not here. */
1070 if (last_artificial)
1071 return NULL_TREE;
1072 binfo = TYPE_BINFO (type);
1073 if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
1074 return NULL_TREE;
1075 else
1076 return binfo;
1079 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1080 The statement may be replaced by another statement, e.g., if the call
1081 simplifies to a constant value. Return true if any changes were made.
1082 It is assumed that the operands have been previously folded. */
1084 static bool
1085 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1087 gimple stmt = gsi_stmt (*gsi);
1088 tree callee;
1089 bool changed = false;
1090 unsigned i;
1092 /* Fold *& in call arguments. */
1093 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1094 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1096 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1097 if (tmp)
1099 gimple_call_set_arg (stmt, i, tmp);
1100 changed = true;
1104 /* Check for virtual calls that became direct calls. */
1105 callee = gimple_call_fn (stmt);
1106 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1108 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1110 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1111 changed = true;
1113 else
1115 tree obj = OBJ_TYPE_REF_OBJECT (callee);
1116 tree binfo = gimple_extract_devirt_binfo_from_cst (obj);
1117 if (binfo)
1119 HOST_WIDE_INT token
1120 = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1121 tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
1122 if (fndecl)
1124 gimple_call_set_fndecl (stmt, fndecl);
1125 changed = true;
1131 if (inplace)
1132 return changed;
1134 /* Check for builtins that CCP can handle using information not
1135 available in the generic fold routines. */
1136 callee = gimple_call_fndecl (stmt);
1137 if (callee && DECL_BUILT_IN (callee))
1139 tree result = gimple_fold_builtin (stmt);
1140 if (result)
1142 if (!update_call_from_tree (gsi, result))
1143 gimplify_and_update_call_from_tree (gsi, result);
1144 changed = true;
1146 else if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
1147 changed |= targetm.gimple_fold_builtin (gsi);
1150 return changed;
1153 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1154 distinguishes both cases. */
1156 static bool
1157 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1159 bool changed = false;
1160 gimple stmt = gsi_stmt (*gsi);
1161 unsigned i;
1163 /* Fold the main computation performed by the statement. */
1164 switch (gimple_code (stmt))
1166 case GIMPLE_ASSIGN:
1168 unsigned old_num_ops = gimple_num_ops (stmt);
1169 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1170 tree lhs = gimple_assign_lhs (stmt);
1171 tree new_rhs;
1172 /* First canonicalize operand order. This avoids building new
1173 trees if this is the only thing fold would later do. */
1174 if ((commutative_tree_code (subcode)
1175 || commutative_ternary_tree_code (subcode))
1176 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1177 gimple_assign_rhs2 (stmt), false))
1179 tree tem = gimple_assign_rhs1 (stmt);
1180 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1181 gimple_assign_set_rhs2 (stmt, tem);
1182 changed = true;
1184 new_rhs = fold_gimple_assign (gsi);
1185 if (new_rhs
1186 && !useless_type_conversion_p (TREE_TYPE (lhs),
1187 TREE_TYPE (new_rhs)))
1188 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1189 if (new_rhs
1190 && (!inplace
1191 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1193 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1194 changed = true;
1196 break;
1199 case GIMPLE_COND:
1200 changed |= fold_gimple_cond (stmt);
1201 break;
1203 case GIMPLE_CALL:
1204 changed |= gimple_fold_call (gsi, inplace);
1205 break;
1207 case GIMPLE_ASM:
1208 /* Fold *& in asm operands. */
1210 size_t noutputs;
1211 const char **oconstraints;
1212 const char *constraint;
1213 bool allows_mem, allows_reg;
1215 noutputs = gimple_asm_noutputs (stmt);
1216 oconstraints = XALLOCAVEC (const char *, noutputs);
1218 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1220 tree link = gimple_asm_output_op (stmt, i);
1221 tree op = TREE_VALUE (link);
1222 oconstraints[i]
1223 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1224 if (REFERENCE_CLASS_P (op)
1225 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1227 TREE_VALUE (link) = op;
1228 changed = true;
1231 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1233 tree link = gimple_asm_input_op (stmt, i);
1234 tree op = TREE_VALUE (link);
1235 constraint
1236 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1237 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1238 oconstraints, &allows_mem, &allows_reg);
1239 if (REFERENCE_CLASS_P (op)
1240 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1241 != NULL_TREE)
1243 TREE_VALUE (link) = op;
1244 changed = true;
1248 break;
1250 case GIMPLE_DEBUG:
1251 if (gimple_debug_bind_p (stmt))
1253 tree val = gimple_debug_bind_get_value (stmt);
1254 if (val
1255 && REFERENCE_CLASS_P (val))
1257 tree tem = maybe_fold_reference (val, false);
1258 if (tem)
1260 gimple_debug_bind_set_value (stmt, tem);
1261 changed = true;
1264 else if (val
1265 && TREE_CODE (val) == ADDR_EXPR)
1267 tree ref = TREE_OPERAND (val, 0);
1268 tree tem = maybe_fold_reference (ref, false);
1269 if (tem)
1271 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
1272 gimple_debug_bind_set_value (stmt, tem);
1273 changed = true;
1277 break;
1279 default:;
1282 stmt = gsi_stmt (*gsi);
1284 /* Fold *& on the lhs. */
1285 if (gimple_has_lhs (stmt))
1287 tree lhs = gimple_get_lhs (stmt);
1288 if (lhs && REFERENCE_CLASS_P (lhs))
1290 tree new_lhs = maybe_fold_reference (lhs, true);
1291 if (new_lhs)
1293 gimple_set_lhs (stmt, new_lhs);
1294 changed = true;
1299 return changed;
1302 /* Fold the statement pointed to by GSI. In some cases, this function may
1303 replace the whole statement with a new one. Returns true iff folding
1304 makes any changes.
1305 The statement pointed to by GSI should be in valid gimple form but may
1306 be in unfolded state as resulting from for example constant propagation
1307 which can produce *&x = 0. */
1309 bool
1310 fold_stmt (gimple_stmt_iterator *gsi)
1312 return fold_stmt_1 (gsi, false);
1315 /* Perform the minimal folding on statement *GSI. Only operations like
1316 *&x created by constant propagation are handled. The statement cannot
1317 be replaced with a new one. Return true if the statement was
1318 changed, false otherwise.
1319 The statement *GSI should be in valid gimple form but may
1320 be in unfolded state as resulting from for example constant propagation
1321 which can produce *&x = 0. */
1323 bool
1324 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1326 gimple stmt = gsi_stmt (*gsi);
1327 bool changed = fold_stmt_1 (gsi, true);
1328 gcc_assert (gsi_stmt (*gsi) == stmt);
1329 return changed;
1332 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1333 if EXPR is null or we don't know how.
1334 If non-null, the result always has boolean type. */
1336 static tree
1337 canonicalize_bool (tree expr, bool invert)
1339 if (!expr)
1340 return NULL_TREE;
1341 else if (invert)
1343 if (integer_nonzerop (expr))
1344 return boolean_false_node;
1345 else if (integer_zerop (expr))
1346 return boolean_true_node;
1347 else if (TREE_CODE (expr) == SSA_NAME)
1348 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1349 build_int_cst (TREE_TYPE (expr), 0));
1350 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1351 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1352 boolean_type_node,
1353 TREE_OPERAND (expr, 0),
1354 TREE_OPERAND (expr, 1));
1355 else
1356 return NULL_TREE;
1358 else
1360 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1361 return expr;
1362 if (integer_nonzerop (expr))
1363 return boolean_true_node;
1364 else if (integer_zerop (expr))
1365 return boolean_false_node;
1366 else if (TREE_CODE (expr) == SSA_NAME)
1367 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1368 build_int_cst (TREE_TYPE (expr), 0));
1369 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1370 return fold_build2 (TREE_CODE (expr),
1371 boolean_type_node,
1372 TREE_OPERAND (expr, 0),
1373 TREE_OPERAND (expr, 1));
1374 else
1375 return NULL_TREE;
1379 /* Check to see if a boolean expression EXPR is logically equivalent to the
1380 comparison (OP1 CODE OP2). Check for various identities involving
1381 SSA_NAMEs. */
1383 static bool
1384 same_bool_comparison_p (const_tree expr, enum tree_code code,
1385 const_tree op1, const_tree op2)
1387 gimple s;
1389 /* The obvious case. */
1390 if (TREE_CODE (expr) == code
1391 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1392 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1393 return true;
1395 /* Check for comparing (name, name != 0) and the case where expr
1396 is an SSA_NAME with a definition matching the comparison. */
1397 if (TREE_CODE (expr) == SSA_NAME
1398 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1400 if (operand_equal_p (expr, op1, 0))
1401 return ((code == NE_EXPR && integer_zerop (op2))
1402 || (code == EQ_EXPR && integer_nonzerop (op2)));
1403 s = SSA_NAME_DEF_STMT (expr);
1404 if (is_gimple_assign (s)
1405 && gimple_assign_rhs_code (s) == code
1406 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1407 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1408 return true;
1411 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1412 of name is a comparison, recurse. */
1413 if (TREE_CODE (op1) == SSA_NAME
1414 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1416 s = SSA_NAME_DEF_STMT (op1);
1417 if (is_gimple_assign (s)
1418 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1420 enum tree_code c = gimple_assign_rhs_code (s);
1421 if ((c == NE_EXPR && integer_zerop (op2))
1422 || (c == EQ_EXPR && integer_nonzerop (op2)))
1423 return same_bool_comparison_p (expr, c,
1424 gimple_assign_rhs1 (s),
1425 gimple_assign_rhs2 (s));
1426 if ((c == EQ_EXPR && integer_zerop (op2))
1427 || (c == NE_EXPR && integer_nonzerop (op2)))
1428 return same_bool_comparison_p (expr,
1429 invert_tree_comparison (c, false),
1430 gimple_assign_rhs1 (s),
1431 gimple_assign_rhs2 (s));
1434 return false;
1437 /* Check to see if two boolean expressions OP1 and OP2 are logically
1438 equivalent. */
1440 static bool
1441 same_bool_result_p (const_tree op1, const_tree op2)
1443 /* Simple cases first. */
1444 if (operand_equal_p (op1, op2, 0))
1445 return true;
1447 /* Check the cases where at least one of the operands is a comparison.
1448 These are a bit smarter than operand_equal_p in that they apply some
1449 identifies on SSA_NAMEs. */
1450 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1451 && same_bool_comparison_p (op1, TREE_CODE (op2),
1452 TREE_OPERAND (op2, 0),
1453 TREE_OPERAND (op2, 1)))
1454 return true;
1455 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1456 && same_bool_comparison_p (op2, TREE_CODE (op1),
1457 TREE_OPERAND (op1, 0),
1458 TREE_OPERAND (op1, 1)))
1459 return true;
1461 /* Default case. */
1462 return false;
1465 /* Forward declarations for some mutually recursive functions. */
1467 static tree
1468 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1469 enum tree_code code2, tree op2a, tree op2b);
1470 static tree
1471 and_var_with_comparison (tree var, bool invert,
1472 enum tree_code code2, tree op2a, tree op2b);
1473 static tree
1474 and_var_with_comparison_1 (gimple stmt,
1475 enum tree_code code2, tree op2a, tree op2b);
1476 static tree
1477 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1478 enum tree_code code2, tree op2a, tree op2b);
1479 static tree
1480 or_var_with_comparison (tree var, bool invert,
1481 enum tree_code code2, tree op2a, tree op2b);
1482 static tree
1483 or_var_with_comparison_1 (gimple stmt,
1484 enum tree_code code2, tree op2a, tree op2b);
1486 /* Helper function for and_comparisons_1: try to simplify the AND of the
1487 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1488 If INVERT is true, invert the value of the VAR before doing the AND.
1489 Return NULL_EXPR if we can't simplify this to a single expression. */
1491 static tree
1492 and_var_with_comparison (tree var, bool invert,
1493 enum tree_code code2, tree op2a, tree op2b)
1495 tree t;
1496 gimple stmt = SSA_NAME_DEF_STMT (var);
1498 /* We can only deal with variables whose definitions are assignments. */
1499 if (!is_gimple_assign (stmt))
1500 return NULL_TREE;
1502 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1503 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1504 Then we only have to consider the simpler non-inverted cases. */
1505 if (invert)
1506 t = or_var_with_comparison_1 (stmt,
1507 invert_tree_comparison (code2, false),
1508 op2a, op2b);
1509 else
1510 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1511 return canonicalize_bool (t, invert);
1514 /* Try to simplify the AND of the ssa variable defined by the assignment
1515 STMT with the comparison specified by (OP2A CODE2 OP2B).
1516 Return NULL_EXPR if we can't simplify this to a single expression. */
1518 static tree
1519 and_var_with_comparison_1 (gimple stmt,
1520 enum tree_code code2, tree op2a, tree op2b)
1522 tree var = gimple_assign_lhs (stmt);
1523 tree true_test_var = NULL_TREE;
1524 tree false_test_var = NULL_TREE;
1525 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1527 /* Check for identities like (var AND (var == 0)) => false. */
1528 if (TREE_CODE (op2a) == SSA_NAME
1529 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1531 if ((code2 == NE_EXPR && integer_zerop (op2b))
1532 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1534 true_test_var = op2a;
1535 if (var == true_test_var)
1536 return var;
1538 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1539 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1541 false_test_var = op2a;
1542 if (var == false_test_var)
1543 return boolean_false_node;
1547 /* If the definition is a comparison, recurse on it. */
1548 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1550 tree t = and_comparisons_1 (innercode,
1551 gimple_assign_rhs1 (stmt),
1552 gimple_assign_rhs2 (stmt),
1553 code2,
1554 op2a,
1555 op2b);
1556 if (t)
1557 return t;
1560 /* If the definition is an AND or OR expression, we may be able to
1561 simplify by reassociating. */
1562 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1563 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1565 tree inner1 = gimple_assign_rhs1 (stmt);
1566 tree inner2 = gimple_assign_rhs2 (stmt);
1567 gimple s;
1568 tree t;
1569 tree partial = NULL_TREE;
1570 bool is_and = (innercode == BIT_AND_EXPR);
1572 /* Check for boolean identities that don't require recursive examination
1573 of inner1/inner2:
1574 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1575 inner1 AND (inner1 OR inner2) => inner1
1576 !inner1 AND (inner1 AND inner2) => false
1577 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1578 Likewise for similar cases involving inner2. */
1579 if (inner1 == true_test_var)
1580 return (is_and ? var : inner1);
1581 else if (inner2 == true_test_var)
1582 return (is_and ? var : inner2);
1583 else if (inner1 == false_test_var)
1584 return (is_and
1585 ? boolean_false_node
1586 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1587 else if (inner2 == false_test_var)
1588 return (is_and
1589 ? boolean_false_node
1590 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1592 /* Next, redistribute/reassociate the AND across the inner tests.
1593 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1594 if (TREE_CODE (inner1) == SSA_NAME
1595 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1596 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1597 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1598 gimple_assign_rhs1 (s),
1599 gimple_assign_rhs2 (s),
1600 code2, op2a, op2b)))
1602 /* Handle the AND case, where we are reassociating:
1603 (inner1 AND inner2) AND (op2a code2 op2b)
1604 => (t AND inner2)
1605 If the partial result t is a constant, we win. Otherwise
1606 continue on to try reassociating with the other inner test. */
1607 if (is_and)
1609 if (integer_onep (t))
1610 return inner2;
1611 else if (integer_zerop (t))
1612 return boolean_false_node;
1615 /* Handle the OR case, where we are redistributing:
1616 (inner1 OR inner2) AND (op2a code2 op2b)
1617 => (t OR (inner2 AND (op2a code2 op2b))) */
1618 else if (integer_onep (t))
1619 return boolean_true_node;
1621 /* Save partial result for later. */
1622 partial = t;
1625 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1626 if (TREE_CODE (inner2) == SSA_NAME
1627 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1628 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1629 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1630 gimple_assign_rhs1 (s),
1631 gimple_assign_rhs2 (s),
1632 code2, op2a, op2b)))
1634 /* Handle the AND case, where we are reassociating:
1635 (inner1 AND inner2) AND (op2a code2 op2b)
1636 => (inner1 AND t) */
1637 if (is_and)
1639 if (integer_onep (t))
1640 return inner1;
1641 else if (integer_zerop (t))
1642 return boolean_false_node;
1643 /* If both are the same, we can apply the identity
1644 (x AND x) == x. */
1645 else if (partial && same_bool_result_p (t, partial))
1646 return t;
1649 /* Handle the OR case. where we are redistributing:
1650 (inner1 OR inner2) AND (op2a code2 op2b)
1651 => (t OR (inner1 AND (op2a code2 op2b)))
1652 => (t OR partial) */
1653 else
1655 if (integer_onep (t))
1656 return boolean_true_node;
1657 else if (partial)
1659 /* We already got a simplification for the other
1660 operand to the redistributed OR expression. The
1661 interesting case is when at least one is false.
1662 Or, if both are the same, we can apply the identity
1663 (x OR x) == x. */
1664 if (integer_zerop (partial))
1665 return t;
1666 else if (integer_zerop (t))
1667 return partial;
1668 else if (same_bool_result_p (t, partial))
1669 return t;
1674 return NULL_TREE;
1677 /* Try to simplify the AND of two comparisons defined by
1678 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1679 If this can be done without constructing an intermediate value,
1680 return the resulting tree; otherwise NULL_TREE is returned.
1681 This function is deliberately asymmetric as it recurses on SSA_DEFs
1682 in the first comparison but not the second. */
1684 static tree
1685 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1686 enum tree_code code2, tree op2a, tree op2b)
1688 tree truth_type = truth_type_for (TREE_TYPE (op1a));
1690 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1691 if (operand_equal_p (op1a, op2a, 0)
1692 && operand_equal_p (op1b, op2b, 0))
1694 /* Result will be either NULL_TREE, or a combined comparison. */
1695 tree t = combine_comparisons (UNKNOWN_LOCATION,
1696 TRUTH_ANDIF_EXPR, code1, code2,
1697 truth_type, op1a, op1b);
1698 if (t)
1699 return t;
1702 /* Likewise the swapped case of the above. */
1703 if (operand_equal_p (op1a, op2b, 0)
1704 && operand_equal_p (op1b, op2a, 0))
1706 /* Result will be either NULL_TREE, or a combined comparison. */
1707 tree t = combine_comparisons (UNKNOWN_LOCATION,
1708 TRUTH_ANDIF_EXPR, code1,
1709 swap_tree_comparison (code2),
1710 truth_type, op1a, op1b);
1711 if (t)
1712 return t;
1715 /* If both comparisons are of the same value against constants, we might
1716 be able to merge them. */
1717 if (operand_equal_p (op1a, op2a, 0)
1718 && TREE_CODE (op1b) == INTEGER_CST
1719 && TREE_CODE (op2b) == INTEGER_CST)
1721 int cmp = tree_int_cst_compare (op1b, op2b);
1723 /* If we have (op1a == op1b), we should either be able to
1724 return that or FALSE, depending on whether the constant op1b
1725 also satisfies the other comparison against op2b. */
1726 if (code1 == EQ_EXPR)
1728 bool done = true;
1729 bool val;
1730 switch (code2)
1732 case EQ_EXPR: val = (cmp == 0); break;
1733 case NE_EXPR: val = (cmp != 0); break;
1734 case LT_EXPR: val = (cmp < 0); break;
1735 case GT_EXPR: val = (cmp > 0); break;
1736 case LE_EXPR: val = (cmp <= 0); break;
1737 case GE_EXPR: val = (cmp >= 0); break;
1738 default: done = false;
1740 if (done)
1742 if (val)
1743 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1744 else
1745 return boolean_false_node;
1748 /* Likewise if the second comparison is an == comparison. */
1749 else if (code2 == EQ_EXPR)
1751 bool done = true;
1752 bool val;
1753 switch (code1)
1755 case EQ_EXPR: val = (cmp == 0); break;
1756 case NE_EXPR: val = (cmp != 0); break;
1757 case LT_EXPR: val = (cmp > 0); break;
1758 case GT_EXPR: val = (cmp < 0); break;
1759 case LE_EXPR: val = (cmp >= 0); break;
1760 case GE_EXPR: val = (cmp <= 0); break;
1761 default: done = false;
1763 if (done)
1765 if (val)
1766 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1767 else
1768 return boolean_false_node;
1772 /* Same business with inequality tests. */
1773 else if (code1 == NE_EXPR)
1775 bool val;
1776 switch (code2)
1778 case EQ_EXPR: val = (cmp != 0); break;
1779 case NE_EXPR: val = (cmp == 0); break;
1780 case LT_EXPR: val = (cmp >= 0); break;
1781 case GT_EXPR: val = (cmp <= 0); break;
1782 case LE_EXPR: val = (cmp > 0); break;
1783 case GE_EXPR: val = (cmp < 0); break;
1784 default:
1785 val = false;
1787 if (val)
1788 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1790 else if (code2 == NE_EXPR)
1792 bool val;
1793 switch (code1)
1795 case EQ_EXPR: val = (cmp == 0); break;
1796 case NE_EXPR: val = (cmp != 0); break;
1797 case LT_EXPR: val = (cmp <= 0); break;
1798 case GT_EXPR: val = (cmp >= 0); break;
1799 case LE_EXPR: val = (cmp < 0); break;
1800 case GE_EXPR: val = (cmp > 0); break;
1801 default:
1802 val = false;
1804 if (val)
1805 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1808 /* Chose the more restrictive of two < or <= comparisons. */
1809 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1810 && (code2 == LT_EXPR || code2 == LE_EXPR))
1812 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1813 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1814 else
1815 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1818 /* Likewise chose the more restrictive of two > or >= comparisons. */
1819 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1820 && (code2 == GT_EXPR || code2 == GE_EXPR))
1822 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1823 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1824 else
1825 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1828 /* Check for singleton ranges. */
1829 else if (cmp == 0
1830 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1831 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1832 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1834 /* Check for disjoint ranges. */
1835 else if (cmp <= 0
1836 && (code1 == LT_EXPR || code1 == LE_EXPR)
1837 && (code2 == GT_EXPR || code2 == GE_EXPR))
1838 return boolean_false_node;
1839 else if (cmp >= 0
1840 && (code1 == GT_EXPR || code1 == GE_EXPR)
1841 && (code2 == LT_EXPR || code2 == LE_EXPR))
1842 return boolean_false_node;
1845 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1846 NAME's definition is a truth value. See if there are any simplifications
1847 that can be done against the NAME's definition. */
1848 if (TREE_CODE (op1a) == SSA_NAME
1849 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1850 && (integer_zerop (op1b) || integer_onep (op1b)))
1852 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1853 || (code1 == NE_EXPR && integer_onep (op1b)));
1854 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1855 switch (gimple_code (stmt))
1857 case GIMPLE_ASSIGN:
1858 /* Try to simplify by copy-propagating the definition. */
1859 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1861 case GIMPLE_PHI:
1862 /* If every argument to the PHI produces the same result when
1863 ANDed with the second comparison, we win.
1864 Do not do this unless the type is bool since we need a bool
1865 result here anyway. */
1866 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1868 tree result = NULL_TREE;
1869 unsigned i;
1870 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1872 tree arg = gimple_phi_arg_def (stmt, i);
1874 /* If this PHI has itself as an argument, ignore it.
1875 If all the other args produce the same result,
1876 we're still OK. */
1877 if (arg == gimple_phi_result (stmt))
1878 continue;
1879 else if (TREE_CODE (arg) == INTEGER_CST)
1881 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1883 if (!result)
1884 result = boolean_false_node;
1885 else if (!integer_zerop (result))
1886 return NULL_TREE;
1888 else if (!result)
1889 result = fold_build2 (code2, boolean_type_node,
1890 op2a, op2b);
1891 else if (!same_bool_comparison_p (result,
1892 code2, op2a, op2b))
1893 return NULL_TREE;
1895 else if (TREE_CODE (arg) == SSA_NAME
1896 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1898 tree temp;
1899 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1900 /* In simple cases we can look through PHI nodes,
1901 but we have to be careful with loops.
1902 See PR49073. */
1903 if (! dom_info_available_p (CDI_DOMINATORS)
1904 || gimple_bb (def_stmt) == gimple_bb (stmt)
1905 || dominated_by_p (CDI_DOMINATORS,
1906 gimple_bb (def_stmt),
1907 gimple_bb (stmt)))
1908 return NULL_TREE;
1909 temp = and_var_with_comparison (arg, invert, code2,
1910 op2a, op2b);
1911 if (!temp)
1912 return NULL_TREE;
1913 else if (!result)
1914 result = temp;
1915 else if (!same_bool_result_p (result, temp))
1916 return NULL_TREE;
1918 else
1919 return NULL_TREE;
1921 return result;
1924 default:
1925 break;
1928 return NULL_TREE;
1931 /* Try to simplify the AND of two comparisons, specified by
1932 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1933 If this can be simplified to a single expression (without requiring
1934 introducing more SSA variables to hold intermediate values),
1935 return the resulting tree. Otherwise return NULL_TREE.
1936 If the result expression is non-null, it has boolean type. */
1938 tree
1939 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1940 enum tree_code code2, tree op2a, tree op2b)
1942 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1943 if (t)
1944 return t;
1945 else
1946 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1949 /* Helper function for or_comparisons_1: try to simplify the OR of the
1950 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1951 If INVERT is true, invert the value of VAR before doing the OR.
1952 Return NULL_EXPR if we can't simplify this to a single expression. */
1954 static tree
1955 or_var_with_comparison (tree var, bool invert,
1956 enum tree_code code2, tree op2a, tree op2b)
1958 tree t;
1959 gimple stmt = SSA_NAME_DEF_STMT (var);
1961 /* We can only deal with variables whose definitions are assignments. */
1962 if (!is_gimple_assign (stmt))
1963 return NULL_TREE;
1965 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1966 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1967 Then we only have to consider the simpler non-inverted cases. */
1968 if (invert)
1969 t = and_var_with_comparison_1 (stmt,
1970 invert_tree_comparison (code2, false),
1971 op2a, op2b);
1972 else
1973 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
1974 return canonicalize_bool (t, invert);
1977 /* Try to simplify the OR of the ssa variable defined by the assignment
1978 STMT with the comparison specified by (OP2A CODE2 OP2B).
1979 Return NULL_EXPR if we can't simplify this to a single expression. */
1981 static tree
1982 or_var_with_comparison_1 (gimple stmt,
1983 enum tree_code code2, tree op2a, tree op2b)
1985 tree var = gimple_assign_lhs (stmt);
1986 tree true_test_var = NULL_TREE;
1987 tree false_test_var = NULL_TREE;
1988 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1990 /* Check for identities like (var OR (var != 0)) => true . */
1991 if (TREE_CODE (op2a) == SSA_NAME
1992 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1994 if ((code2 == NE_EXPR && integer_zerop (op2b))
1995 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1997 true_test_var = op2a;
1998 if (var == true_test_var)
1999 return var;
2001 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2002 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2004 false_test_var = op2a;
2005 if (var == false_test_var)
2006 return boolean_true_node;
2010 /* If the definition is a comparison, recurse on it. */
2011 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2013 tree t = or_comparisons_1 (innercode,
2014 gimple_assign_rhs1 (stmt),
2015 gimple_assign_rhs2 (stmt),
2016 code2,
2017 op2a,
2018 op2b);
2019 if (t)
2020 return t;
2023 /* If the definition is an AND or OR expression, we may be able to
2024 simplify by reassociating. */
2025 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2026 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2028 tree inner1 = gimple_assign_rhs1 (stmt);
2029 tree inner2 = gimple_assign_rhs2 (stmt);
2030 gimple s;
2031 tree t;
2032 tree partial = NULL_TREE;
2033 bool is_or = (innercode == BIT_IOR_EXPR);
2035 /* Check for boolean identities that don't require recursive examination
2036 of inner1/inner2:
2037 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2038 inner1 OR (inner1 AND inner2) => inner1
2039 !inner1 OR (inner1 OR inner2) => true
2040 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2042 if (inner1 == true_test_var)
2043 return (is_or ? var : inner1);
2044 else if (inner2 == true_test_var)
2045 return (is_or ? var : inner2);
2046 else if (inner1 == false_test_var)
2047 return (is_or
2048 ? boolean_true_node
2049 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2050 else if (inner2 == false_test_var)
2051 return (is_or
2052 ? boolean_true_node
2053 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2055 /* Next, redistribute/reassociate the OR across the inner tests.
2056 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2057 if (TREE_CODE (inner1) == SSA_NAME
2058 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2059 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2060 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2061 gimple_assign_rhs1 (s),
2062 gimple_assign_rhs2 (s),
2063 code2, op2a, op2b)))
2065 /* Handle the OR case, where we are reassociating:
2066 (inner1 OR inner2) OR (op2a code2 op2b)
2067 => (t OR inner2)
2068 If the partial result t is a constant, we win. Otherwise
2069 continue on to try reassociating with the other inner test. */
2070 if (is_or)
2072 if (integer_onep (t))
2073 return boolean_true_node;
2074 else if (integer_zerop (t))
2075 return inner2;
2078 /* Handle the AND case, where we are redistributing:
2079 (inner1 AND inner2) OR (op2a code2 op2b)
2080 => (t AND (inner2 OR (op2a code op2b))) */
2081 else if (integer_zerop (t))
2082 return boolean_false_node;
2084 /* Save partial result for later. */
2085 partial = t;
2088 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2089 if (TREE_CODE (inner2) == SSA_NAME
2090 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2091 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2092 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2093 gimple_assign_rhs1 (s),
2094 gimple_assign_rhs2 (s),
2095 code2, op2a, op2b)))
2097 /* Handle the OR case, where we are reassociating:
2098 (inner1 OR inner2) OR (op2a code2 op2b)
2099 => (inner1 OR t)
2100 => (t OR partial) */
2101 if (is_or)
2103 if (integer_zerop (t))
2104 return inner1;
2105 else if (integer_onep (t))
2106 return boolean_true_node;
2107 /* If both are the same, we can apply the identity
2108 (x OR x) == x. */
2109 else if (partial && same_bool_result_p (t, partial))
2110 return t;
2113 /* Handle the AND case, where we are redistributing:
2114 (inner1 AND inner2) OR (op2a code2 op2b)
2115 => (t AND (inner1 OR (op2a code2 op2b)))
2116 => (t AND partial) */
2117 else
2119 if (integer_zerop (t))
2120 return boolean_false_node;
2121 else if (partial)
2123 /* We already got a simplification for the other
2124 operand to the redistributed AND expression. The
2125 interesting case is when at least one is true.
2126 Or, if both are the same, we can apply the identity
2127 (x AND x) == x. */
2128 if (integer_onep (partial))
2129 return t;
2130 else if (integer_onep (t))
2131 return partial;
2132 else if (same_bool_result_p (t, partial))
2133 return t;
2138 return NULL_TREE;
2141 /* Try to simplify the OR of two comparisons defined by
2142 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2143 If this can be done without constructing an intermediate value,
2144 return the resulting tree; otherwise NULL_TREE is returned.
2145 This function is deliberately asymmetric as it recurses on SSA_DEFs
2146 in the first comparison but not the second. */
2148 static tree
2149 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2150 enum tree_code code2, tree op2a, tree op2b)
2152 tree truth_type = truth_type_for (TREE_TYPE (op1a));
2154 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2155 if (operand_equal_p (op1a, op2a, 0)
2156 && operand_equal_p (op1b, op2b, 0))
2158 /* Result will be either NULL_TREE, or a combined comparison. */
2159 tree t = combine_comparisons (UNKNOWN_LOCATION,
2160 TRUTH_ORIF_EXPR, code1, code2,
2161 truth_type, op1a, op1b)
2162 if (t)
2163 return t;
2166 /* Likewise the swapped case of the above. */
2167 if (operand_equal_p (op1a, op2b, 0)
2168 && operand_equal_p (op1b, op2a, 0))
2170 /* Result will be either NULL_TREE, or a combined comparison. */
2171 tree t = combine_comparisons (UNKNOWN_LOCATION,
2172 TRUTH_ORIF_EXPR, code1,
2173 swap_tree_comparison (code2),
2174 truth_type, op1a, op1b);
2175 if (t)
2176 return t;
2179 /* If both comparisons are of the same value against constants, we might
2180 be able to merge them. */
2181 if (operand_equal_p (op1a, op2a, 0)
2182 && TREE_CODE (op1b) == INTEGER_CST
2183 && TREE_CODE (op2b) == INTEGER_CST)
/* CMP relates the two constants: negative when op1b < op2b, zero when
   they are equal, positive when op1b > op2b.  */
2185 int cmp = tree_int_cst_compare (op1b, op2b);
2187 /* If we have (op1a != op1b), we should either be able to
2188 return that or TRUE, depending on whether the constant op1b
2189 also satisfies the other comparison against op2b. */
2190 if (code1 == NE_EXPR)
2192 bool done = true;
2193 bool val;
2194 switch (code2)
2196 case EQ_EXPR: val = (cmp == 0); break;
2197 case NE_EXPR: val = (cmp != 0); break;
2198 case LT_EXPR: val = (cmp < 0); break;
2199 case GT_EXPR: val = (cmp > 0); break;
2200 case LE_EXPR: val = (cmp <= 0); break;
2201 case GE_EXPR: val = (cmp >= 0); break;
2202 default: done = false;
2204 if (done)
2206 if (val)
2207 return boolean_true_node;
2208 else
2209 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2212 /* Likewise if the second comparison is a != comparison. */
2213 else if (code2 == NE_EXPR)
2215 bool done = true;
2216 bool val;
/* Note: CMP still compares op1b to op2b, so the LT/GT/LE/GE senses are
   mirrored here relative to the switch in the NE_EXPR case above.  */
2217 switch (code1)
2219 case EQ_EXPR: val = (cmp == 0); break;
2220 case NE_EXPR: val = (cmp != 0); break;
2221 case LT_EXPR: val = (cmp > 0); break;
2222 case GT_EXPR: val = (cmp < 0); break;
2223 case LE_EXPR: val = (cmp >= 0); break;
2224 case GE_EXPR: val = (cmp <= 0); break;
2225 default: done = false;
2227 if (done)
2229 if (val)
2230 return boolean_true_node;
2231 else
2232 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2236 /* See if an equality test is redundant with the other comparison. */
2237 else if (code1 == EQ_EXPR)
2239 bool val;
2240 switch (code2)
2242 case EQ_EXPR: val = (cmp == 0); break;
2243 case NE_EXPR: val = (cmp != 0); break;
2244 case LT_EXPR: val = (cmp < 0); break;
2245 case GT_EXPR: val = (cmp > 0); break;
2246 case LE_EXPR: val = (cmp <= 0); break;
2247 case GE_EXPR: val = (cmp >= 0); break;
2248 default:
2249 val = false;
2251 if (val)
2252 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2254 else if (code2 == EQ_EXPR)
2256 bool val;
2257 switch (code1)
2259 case EQ_EXPR: val = (cmp == 0); break;
2260 case NE_EXPR: val = (cmp != 0); break;
2261 case LT_EXPR: val = (cmp > 0); break;
2262 case GT_EXPR: val = (cmp < 0); break;
2263 case LE_EXPR: val = (cmp >= 0); break;
2264 case GE_EXPR: val = (cmp <= 0); break;
2265 default:
2266 val = false;
2268 if (val)
2269 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2272 /* Choose the less restrictive of two < or <= comparisons. */
2273 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2274 && (code2 == LT_EXPR || code2 == LE_EXPR))
2276 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2277 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2278 else
2279 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2282 /* Likewise choose the less restrictive of two > or >= comparisons. */
2283 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2284 && (code2 == GT_EXPR || code2 == GE_EXPR))
2286 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2287 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2288 else
2289 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2292 /* Check for singleton ranges. */
2293 else if (cmp == 0
2294 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2295 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2296 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2298 /* Check for less/greater pairs that don't restrict the range at all. */
2299 else if (cmp >= 0
2300 && (code1 == LT_EXPR || code1 == LE_EXPR)
2301 && (code2 == GT_EXPR || code2 == GE_EXPR))
2302 return boolean_true_node;
2303 else if (cmp <= 0
2304 && (code1 == GT_EXPR || code1 == GE_EXPR)
2305 && (code2 == LT_EXPR || code2 == LE_EXPR))
2306 return boolean_true_node;
2309 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2310 NAME's definition is a truth value. See if there are any simplifications
2311 that can be done against the NAME's definition. */
2312 if (TREE_CODE (op1a) == SSA_NAME
2313 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2314 && (integer_zerop (op1b) || integer_onep (op1b)))
/* INVERT is true when the test actually asks for the negation of NAME,
   i.e. (NAME == 0) or (NAME != 1).  */
2316 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2317 || (code1 == NE_EXPR && integer_onep (op1b)));
2318 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2319 switch (gimple_code (stmt))
2321 case GIMPLE_ASSIGN:
2322 /* Try to simplify by copy-propagating the definition. */
2323 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2325 case GIMPLE_PHI:
2326 /* If every argument to the PHI produces the same result when
2327 ORed with the second comparison, we win.
2328 Do not do this unless the type is bool since we need a bool
2329 result here anyway. */
2330 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2332 tree result = NULL_TREE;
2333 unsigned i;
2334 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2336 tree arg = gimple_phi_arg_def (stmt, i);
2338 /* If this PHI has itself as an argument, ignore it.
2339 If all the other args produce the same result,
2340 we're still OK. */
2341 if (arg == gimple_phi_result (stmt))
2342 continue;
2343 else if (TREE_CODE (arg) == INTEGER_CST)
2345 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2347 if (!result)
2348 result = boolean_true_node;
2349 else if (!integer_onep (result))
2350 return NULL_TREE;
2352 else if (!result)
2353 result = fold_build2 (code2, boolean_type_node,
2354 op2a, op2b);
2355 else if (!same_bool_comparison_p (result,
2356 code2, op2a, op2b))
2357 return NULL_TREE;
2359 else if (TREE_CODE (arg) == SSA_NAME
2360 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2362 tree temp;
2363 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2364 /* In simple cases we can look through PHI nodes,
2365 but we have to be careful with loops.
2366 See PR49073. */
2367 if (! dom_info_available_p (CDI_DOMINATORS)
2368 || gimple_bb (def_stmt) == gimple_bb (stmt)
2369 || dominated_by_p (CDI_DOMINATORS,
2370 gimple_bb (def_stmt),
2371 gimple_bb (stmt)))
2372 return NULL_TREE;
2373 temp = or_var_with_comparison (arg, invert, code2,
2374 op2a, op2b);
2375 if (!temp)
2376 return NULL_TREE;
2377 else if (!result)
2378 result = temp;
2379 else if (!same_bool_result_p (result, temp))
2380 return NULL_TREE;
2382 else
2383 return NULL_TREE;
2385 return result;
2388 default:
2389 break;
2392 return NULL_TREE;
2395 /* Try to simplify the OR of two comparisons, specified by
2396 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2397 If this can be simplified to a single expression (without requiring
2398 introducing more SSA variables to hold intermediate values),
2399 return the resulting tree. Otherwise return NULL_TREE.
2400 If the result expression is non-null, it has boolean type. */
2402 tree
2403 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2404 enum tree_code code2, tree op2a, tree op2b)
2406 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2407 if (t)
2408 return t;
2409 else
2410 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2414 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2416 Either NULL_TREE, a simplified but non-constant or a constant
2417 is returned.
2419 ??? This should go into a gimple-fold-inline.h file to be eventually
2420 privatized with the single valueize function used in the various TUs
2421 to avoid the indirect function call overhead. */
2423 tree
2424 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2426 location_t loc = gimple_location (stmt);
2427 switch (gimple_code (stmt))
2429 case GIMPLE_ASSIGN:
2431 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2433 switch (get_gimple_rhs_class (subcode))
2435 case GIMPLE_SINGLE_RHS:
2437 tree rhs = gimple_assign_rhs1 (stmt);
2438 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2440 if (TREE_CODE (rhs) == SSA_NAME)
2442 /* If the RHS is an SSA_NAME, return its known constant value,
2443 if any. */
2444 return (*valueize) (rhs);
2446 /* Handle propagating invariant addresses into address
2447 operations. */
2448 else if (TREE_CODE (rhs) == ADDR_EXPR
2449 && !is_gimple_min_invariant (rhs))
2451 HOST_WIDE_INT offset = 0;
2452 tree base;
2453 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2454 &offset,
2455 valueize);
2456 if (base
2457 && (CONSTANT_CLASS_P (base)
2458 || decl_address_invariant_p (base)))
2459 return build_invariant_address (TREE_TYPE (rhs),
2460 base, offset);
/* A fully-initialized vector CONSTRUCTOR folds to a VECTOR_CST when
   every valueized element is a literal constant.  */
2462 else if (TREE_CODE (rhs) == CONSTRUCTOR
2463 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2464 && (CONSTRUCTOR_NELTS (rhs)
2465 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2467 unsigned i;
2468 tree val, *vec;
2470 vec = XALLOCAVEC (tree,
2471 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
2472 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2474 val = (*valueize) (val);
2475 if (TREE_CODE (val) == INTEGER_CST
2476 || TREE_CODE (val) == REAL_CST
2477 || TREE_CODE (val) == FIXED_CST)
2478 vec[i] = val;
2479 else
2480 return NULL_TREE;
2483 return build_vector (TREE_TYPE (rhs), vec);
/* References whose base is an SSA name can be folded by valueizing
   the base operand first.  */
2486 if (kind == tcc_reference)
2488 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2489 || TREE_CODE (rhs) == REALPART_EXPR
2490 || TREE_CODE (rhs) == IMAGPART_EXPR)
2491 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2493 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2494 return fold_unary_loc (EXPR_LOCATION (rhs),
2495 TREE_CODE (rhs),
2496 TREE_TYPE (rhs), val);
2498 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2499 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2501 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2502 return fold_ternary_loc (EXPR_LOCATION (rhs),
2503 TREE_CODE (rhs),
2504 TREE_TYPE (rhs), val,
2505 TREE_OPERAND (rhs, 1),
2506 TREE_OPERAND (rhs, 2));
2508 else if (TREE_CODE (rhs) == MEM_REF
2509 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2511 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2512 if (TREE_CODE (val) == ADDR_EXPR
2513 && is_gimple_min_invariant (val))
2515 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2516 unshare_expr (val),
2517 TREE_OPERAND (rhs, 1));
2518 if (tem)
2519 rhs = tem;
2522 return fold_const_aggregate_ref_1 (rhs, valueize);
2524 else if (kind == tcc_declaration)
2525 return get_symbol_constant_value (rhs);
2526 return rhs;
2529 case GIMPLE_UNARY_RHS:
2531 /* Handle unary operators that can appear in GIMPLE form.
2532 Note that we know the single operand must be a constant,
2533 so this should almost always return a simplified RHS. */
2534 tree lhs = gimple_assign_lhs (stmt);
2535 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2537 /* Conversions are useless for CCP purposes if they are
2538 value-preserving. Thus the restrictions that
2539 useless_type_conversion_p places for restrict qualification
2540 of pointer types should not apply here.
2541 Substitution later will only substitute to allowed places. */
2542 if (CONVERT_EXPR_CODE_P (subcode)
2543 && POINTER_TYPE_P (TREE_TYPE (lhs))
2544 && POINTER_TYPE_P (TREE_TYPE (op0))
2545 && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2546 == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2547 && TYPE_MODE (TREE_TYPE (lhs))
2548 == TYPE_MODE (TREE_TYPE (op0)))
2549 return op0;
2551 return
2552 fold_unary_ignore_overflow_loc (loc, subcode,
2553 gimple_expr_type (stmt), op0);
2556 case GIMPLE_BINARY_RHS:
2558 /* Handle binary operators that can appear in GIMPLE form. */
2559 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2560 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2562 /* Translate &x + CST into an invariant form suitable for
2563 further propagation. */
2564 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2565 && TREE_CODE (op0) == ADDR_EXPR
2566 && TREE_CODE (op1) == INTEGER_CST)
2568 tree off = fold_convert (ptr_type_node, op1);
2569 return build_fold_addr_expr_loc
2570 (loc,
2571 fold_build2 (MEM_REF,
2572 TREE_TYPE (TREE_TYPE (op0)),
2573 unshare_expr (op0), off));
2576 return fold_binary_loc (loc, subcode,
2577 gimple_expr_type (stmt), op0, op1);
2580 case GIMPLE_TERNARY_RHS:
2582 /* Handle ternary operators that can appear in GIMPLE form. */
2583 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2584 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2585 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2587 /* Fold embedded expressions in ternary codes. */
2588 if ((subcode == COND_EXPR
2589 || subcode == VEC_COND_EXPR)
2590 && COMPARISON_CLASS_P (op0))
2592 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2593 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2594 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2595 TREE_TYPE (op0), op00, op01);
2596 if (tem)
2597 op0 = tem;
2600 return fold_ternary_loc (loc, subcode,
2601 gimple_expr_type (stmt), op0, op1, op2);
2604 default:
2605 gcc_unreachable ();
2609 case GIMPLE_CALL:
2611 tree fn;
2613 if (gimple_call_internal_p (stmt))
2614 /* No folding yet for these functions. */
2615 return NULL_TREE;
2617 fn = (*valueize) (gimple_call_fn (stmt));
2618 if (TREE_CODE (fn) == ADDR_EXPR
2619 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2620 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
/* Builtin call: valueize every argument, rebuild a CALL_EXPR and let
   the generic builtin folder have a go at it.  */
2622 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2623 tree call, retval;
2624 unsigned i;
2625 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2626 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2627 call = build_call_array_loc (loc,
2628 gimple_call_return_type (stmt),
2629 fn, gimple_call_num_args (stmt), args);
2630 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2631 if (retval)
2632 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2633 STRIP_NOPS (retval);
2634 return retval;
2636 return NULL_TREE;
2639 default:
2640 return NULL_TREE;
2644 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2645 Returns NULL_TREE if folding to a constant is not possible, otherwise
2646 returns a constant according to is_gimple_min_invariant. */
2648 tree
2649 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2651 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2652 if (res && is_gimple_min_invariant (res))
2653 return res;
2654 return NULL_TREE;
2658 /* The following set of functions are supposed to fold references using
2659 their constant initializers. */
2661 static tree fold_ctor_reference (tree type, tree ctor,
2662 unsigned HOST_WIDE_INT offset,
2663 unsigned HOST_WIDE_INT size, tree);
2665 /* See if we can find constructor defining value of BASE.
2666 When we know the constructor with constant offset (such as
2667 base is array[40] and we do know constructor of array), then
2668 BIT_OFFSET is adjusted accordingly.
2670 As a special case, return error_mark_node when constructor
2671 is not explicitly available, but it is known to be zero
2672 such as 'static const int a;'. */
2673 static tree
2674 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2675 tree (*valueize)(tree))
2677 HOST_WIDE_INT bit_offset2, size, max_size;
/* Look through a MEM_REF: fold its constant byte offset into
   BIT_OFFSET and strip down to the underlying address.  */
2678 if (TREE_CODE (base) == MEM_REF)
2680 if (!integer_zerop (TREE_OPERAND (base, 1)))
2682 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2683 return NULL_TREE;
2684 *bit_offset += (mem_ref_offset (base).low
2685 * BITS_PER_UNIT);
2688 if (valueize
2689 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2690 base = valueize (TREE_OPERAND (base, 0));
2691 if (!base || TREE_CODE (base) != ADDR_EXPR)
2692 return NULL_TREE;
2693 base = TREE_OPERAND (base, 0);
2696 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2697 DECL_INITIAL. If BASE is a nested reference into another
2698 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2699 the inner reference. */
2700 switch (TREE_CODE (base))
2702 case VAR_DECL:
2703 if (!const_value_known_p (base))
2704 return NULL_TREE;
2706 /* Fallthru. */
2707 case CONST_DECL:
2708 if (!DECL_INITIAL (base)
2709 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2710 return error_mark_node;
2711 /* Do not return an error_mark_node DECL_INITIAL. LTO uses this
2712 as special marker (_not_ zero ...) for its own purposes. */
2713 if (DECL_INITIAL (base) == error_mark_node)
2714 return NULL_TREE;
2715 return DECL_INITIAL (base);
2717 case ARRAY_REF:
2718 case COMPONENT_REF:
/* Only constant-offset, fixed-size nested references can be resolved;
   otherwise give up.  */
2719 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2720 if (max_size == -1 || size != max_size)
2721 return NULL_TREE;
2722 *bit_offset += bit_offset2;
2723 return get_base_constructor (base, bit_offset, valueize);
2725 case STRING_CST:
2726 case CONSTRUCTOR:
2727 return base;
2729 default:
2730 return NULL_TREE;
2734 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2735 to the memory at bit OFFSET.
2737 We do only simple job of folding byte accesses. */
2739 static tree
2740 fold_string_cst_ctor_reference (tree type, tree ctor,
2741 unsigned HOST_WIDE_INT offset,
2742 unsigned HOST_WIDE_INT size)
2744 if (INTEGRAL_TYPE_P (type)
2745 && (TYPE_MODE (type)
2746 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2747 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2748 == MODE_INT)
2749 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2750 && size == BITS_PER_UNIT
2751 && !(offset % BITS_PER_UNIT))
2753 offset /= BITS_PER_UNIT;
2754 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2755 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2756 [offset]));
2757 /* Folding
2758 const char a[20]="hello";
2759 return a[10];
2761 might lead to offset greater than string length. In this case we
2762 know value is either initialized to 0 or out of bounds. Return 0
2763 in both cases. */
2764 return build_zero_cst (type);
2766 return NULL_TREE;
2769 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2770 SIZE to the memory at bit OFFSET. */
2772 static tree
2773 fold_array_ctor_reference (tree type, tree ctor,
2774 unsigned HOST_WIDE_INT offset,
2775 unsigned HOST_WIDE_INT size,
2776 tree from_decl)
2778 unsigned HOST_WIDE_INT cnt;
2779 tree cfield, cval;
2780 double_int low_bound, elt_size;
2781 double_int index, max_index;
2782 double_int access_index;
2783 tree domain_type = NULL_TREE, index_type = NULL_TREE;
2784 HOST_WIDE_INT inner_offset;
2786 /* Compute low bound and elt size. */
2787 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2788 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2789 if (domain_type && TYPE_MIN_VALUE (domain_type))
2791 /* Static constructors for variably sized objects makes no sense. */
2792 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2793 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
2794 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2796 else
2797 low_bound = double_int_zero;
2798 /* Static constructors for variably sized objects makes no sense. */
2799 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2800 == INTEGER_CST);
2801 elt_size =
2802 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2805 /* We can handle only constantly sized accesses that are known to not
2806 be larger than size of array element. */
2807 if (!TYPE_SIZE_UNIT (type)
2808 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2809 || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type))))
2810 return NULL_TREE;
2812 /* Compute the array index we look for. */
2813 access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
2814 .udiv (elt_size, TRUNC_DIV_EXPR)
2815 access_index += low_bound;
2816 if (index_type)
2817 access_index = access_index.ext (TYPE_PRECISION (index_type),
2818 TYPE_UNSIGNED (index_type));
2820 /* And offset within the access. */
2821 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
2823 /* See if the array field is large enough to span whole access. We do not
2824 care to fold accesses spanning multiple array indexes. */
2825 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
2826 return NULL_TREE;
/* Start one below LOW_BOUND so the first constructor element without an
   explicit index is assigned index == LOW_BOUND by the increment below.  */
2828 index = low_bound - double_int_one;
2829 if (index_type)
2830 index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));
2832 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2834 /* Array constructor might explicitly set index, or specify range
2835 or leave index NULL meaning that it is next index after previous
2836 one. */
2837 if (cfield)
2839 if (TREE_CODE (cfield) == INTEGER_CST)
2840 max_index = index = tree_to_double_int (cfield);
2841 else
2843 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2844 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2845 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2848 else
2850 index += double_int_one;
2851 if (index_type)
2852 index = index.ext (TYPE_PRECISION (index_type),
2853 TYPE_UNSIGNED (index_type));
2854 max_index = index;
2857 /* Do we have match? */
/* The second argument 1 to double_int::cmp requests an unsigned
   comparison — TODO confirm against the double_int API.  */
2858 if (access_index.cmp (index, 1) >= 0
2859 && access_index.cmp (max_index, 1) <= 0)
2860 return fold_ctor_reference (type, cval, inner_offset, size,
2861 from_decl);
2863 /* When memory is not explicitly mentioned in constructor,
2864 it is 0 (or out of range). */
2865 return build_zero_cst (type);
2868 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2869 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2871 static tree
2872 fold_nonarray_ctor_reference (tree type, tree ctor,
2873 unsigned HOST_WIDE_INT offset,
2874 unsigned HOST_WIDE_INT size,
2875 tree from_decl)
2877 unsigned HOST_WIDE_INT cnt;
2878 tree cfield, cval;
2880 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2881 cval)
2883 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2884 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2885 tree field_size = DECL_SIZE (cfield);
2886 double_int bitoffset;
2887 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2888 double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
2889 double_int bitoffset_end, access_end;
2891 /* Variable sized objects in static constructors makes no sense,
2892 but field_size can be NULL for flexible array members. */
2893 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2894 && TREE_CODE (byte_offset) == INTEGER_CST
2895 && (field_size != NULL_TREE
2896 ? TREE_CODE (field_size) == INTEGER_CST
2897 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2899 /* Compute bit offset of the field. */
2900 bitoffset = tree_to_double_int (field_offset)
2901 + byte_offset_cst * bits_per_unit_cst;
2902 /* Compute bit offset where the field ends. */
2903 if (field_size != NULL_TREE)
2904 bitoffset_end = bitoffset + tree_to_double_int (field_size);
2905 else
2906 bitoffset_end = double_int_zero;
2908 access_end = double_int::from_uhwi (offset)
2909 + double_int::from_uhwi (size);
2911 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2912 [BITOFFSET, BITOFFSET_END)? */
/* For a flexible array member FIELD_SIZE is NULL; the overlap test
   then skips the end-of-field check entirely, treating the field as
   extending to the end of the object.  */
2913 if (access_end.cmp (bitoffset, 0) > 0
2914 && (field_size == NULL_TREE
2915 || double_int::from_uhwi (offset).slt (bitoffset_end)))
2917 double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
2918 /* We do have overlap. Now see if field is large enough to
2919 cover the access. Give up for accesses spanning multiple
2920 fields. */
2921 if (access_end.cmp (bitoffset_end, 0) > 0)
2922 return NULL_TREE;
2923 if (double_int::from_uhwi (offset).slt (bitoffset))
2924 return NULL_TREE;
2925 return fold_ctor_reference (type, cval,
2926 inner_offset.to_uhwi (), size,
2927 from_decl);
2930 /* When memory is not explicitly mentioned in constructor, it is 0. */
2931 return build_zero_cst (type);
2934 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2935 to the memory at bit OFFSET. */
2937 static tree
2938 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2939 unsigned HOST_WIDE_INT size, tree from_decl)
2941 tree ret;
2943 /* We found the field with exact match. */
2944 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2945 && !offset)
2946 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
2948 /* We are at the end of walk, see if we can view convert the
2949 result. */
2950 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2951 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2952 && operand_equal_p (TYPE_SIZE (type),
2953 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2955 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
2956 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2957 if (ret)
2958 STRIP_NOPS (ret);
2959 return ret;
2961 if (TREE_CODE (ctor) == STRING_CST)
2962 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2963 if (TREE_CODE (ctor) == CONSTRUCTOR)
2966 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2967 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
2968 return fold_array_ctor_reference (type, ctor, offset, size,
2969 from_decl);
2970 else
2971 return fold_nonarray_ctor_reference (type, ctor, offset, size,
2972 from_decl);
2975 return NULL_TREE;
2978 /* Return the tree representing the element referenced by T if T is an
2979 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
2980 names using VALUEIZE. Return NULL_TREE otherwise. */
2982 tree
2983 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
2985 tree ctor, idx, base;
2986 HOST_WIDE_INT offset, size, max_size;
2987 tree tem;
/* Never fold volatile accesses away.  */
2989 if (TREE_THIS_VOLATILE (t))
2990 return NULL_TREE;
2992 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
2993 return get_symbol_constant_value (t);
2995 tem = fold_read_from_constant_string (t);
2996 if (tem)
2997 return tem;
2999 switch (TREE_CODE (t))
3001 case ARRAY_REF:
3002 case ARRAY_RANGE_REF:
3003 /* Constant indexes are handled well by get_base_constructor.
3004 Only special case variable offsets.
3005 FIXME: This code can't handle nested references with variable indexes
3006 (they will be handled only by iteration of ccp). Perhaps we can bring
3007 get_ref_base_and_extent here and make it use a valueize callback. */
3008 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3009 && valueize
3010 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3011 && TREE_CODE (idx) == INTEGER_CST)
3013 tree low_bound, unit_size;
3014 double_int doffset;
3016 /* If the resulting bit-offset is constant, track it. */
3017 if ((low_bound = array_ref_low_bound (t),
3018 TREE_CODE (low_bound) == INTEGER_CST)
3019 && (unit_size = array_ref_element_size (t),
3020 host_integerp (unit_size, 1))
3021 && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
3022 .sext (TYPE_PRECISION (TREE_TYPE (idx))),
3023 doffset.fits_shwi ()))
3025 offset = doffset.to_shwi ();
3026 offset *= TREE_INT_CST_LOW (unit_size);
3027 offset *= BITS_PER_UNIT;
3029 base = TREE_OPERAND (t, 0);
3030 ctor = get_base_constructor (base, &offset, valueize);
3031 /* Empty constructor. Always fold to 0. */
3032 if (ctor == error_mark_node)
3033 return build_zero_cst (TREE_TYPE (t));
3034 /* Out of bound array access. Value is undefined,
3035 but don't fold. */
3036 if (offset < 0)
3037 return NULL_TREE;
3038 /* We can not determine ctor. */
3039 if (!ctor)
3040 return NULL_TREE;
3041 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3042 TREE_INT_CST_LOW (unit_size)
3043 * BITS_PER_UNIT,
3044 base);
3047 /* Fallthru. */
3049 case COMPONENT_REF:
3050 case BIT_FIELD_REF:
3051 case TARGET_MEM_REF:
3052 case MEM_REF:
3053 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3054 ctor = get_base_constructor (base, &offset, valueize);
3056 /* Empty constructor. Always fold to 0. */
3057 if (ctor == error_mark_node)
3058 return build_zero_cst (TREE_TYPE (t));
3059 /* We do not know precise address. */
3060 if (max_size == -1 || max_size != size)
3061 return NULL_TREE;
3062 /* We can not determine ctor. */
3063 if (!ctor)
3064 return NULL_TREE;
3066 /* Out of bound array access. Value is undefined, but don't fold. */
3067 if (offset < 0)
3068 return NULL_TREE;
3070 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
3071 base);
3073 case REALPART_EXPR:
3074 case IMAGPART_EXPR:
3076 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3077 if (c && TREE_CODE (c) == COMPLEX_CST)
3078 return fold_build1_loc (EXPR_LOCATION (t),
3079 TREE_CODE (t), TREE_TYPE (t), c);
3080 break;
3083 default:
3084 break;
3087 return NULL_TREE;
3090 tree
3091 fold_const_aggregate_ref (tree t)
3093 return fold_const_aggregate_ref_1 (t, NULL);
3096 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3097 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3098 KNOWN_BINFO carries the binfo describing the true type of
3099 OBJ_TYPE_REF_OBJECT(REF). */
3101 tree
3102 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3104 unsigned HOST_WIDE_INT offset, size;
3105 tree v, fn, vtable;
3107 vtable = v = BINFO_VTABLE (known_binfo);
3108 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
3109 if (!v)
3110 return NULL_TREE;
/* The vtable reference may carry a constant byte offset; convert it to
   bits and strip down to the underlying address.  */
3112 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3114 offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3115 v = TREE_OPERAND (v, 0);
3117 else
3118 offset = 0;
3120 if (TREE_CODE (v) != ADDR_EXPR)
3121 return NULL_TREE;
3122 v = TREE_OPERAND (v, 0);
3124 if (TREE_CODE (v) != VAR_DECL
3125 || !DECL_VIRTUAL_P (v)
3126 || !DECL_INITIAL (v)
3127 || DECL_INITIAL (v) == error_mark_node)
3128 return NULL_TREE;
3129 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3130 size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
/* Index into the vtable initializer: each slot is SIZE bits wide and
   TOKEN selects the slot.  */
3131 offset += token * size;
3132 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
3133 offset, size, vtable);
3134 if (!fn || integer_zerop (fn))
3135 return NULL_TREE;
3136 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3137 || TREE_CODE (fn) == FDESC_EXPR);
3138 fn = TREE_OPERAND (fn, 0);
3139 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3141 /* When cgraph node is missing and function is not public, we cannot
3142 devirtualize. This can happen in WHOPR when the actual method
3143 ends up in other partition, because we found devirtualization
3144 possibility too late. */
3145 if (!can_refer_decl_in_current_unit_p (fn, vtable))
3146 return NULL_TREE;
3148 /* Make sure we create a cgraph node for functions we'll reference.
3149 They can be non-existent if the reference comes from an entry
3150 of an external vtable for example. */
3151 cgraph_get_create_node (fn);
3153 return fn;
3156 /* Return true iff VAL is a gimple expression that is known to be
3157 non-negative. Restricted to floating-point inputs. */
3159 bool
3160 gimple_val_nonnegative_real_p (tree val)
3162 gimple def_stmt;
3164 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3166 /* Use existing logic for non-gimple trees. */
3167 if (tree_expr_nonnegative_p (val))
3168 return true;
3170 if (TREE_CODE (val) != SSA_NAME)
3171 return false;
3173 /* Currently we look only at the immediately defining statement
3174 to make this determination, since recursion on defining
3175 statements of operands can lead to quadratic behavior in the
3176 worst case. This is expected to catch almost all occurrences
3177 in practice. It would be possible to implement limited-depth
3178 recursion if important cases are lost. Alternatively, passes
3179 that need this information (such as the pow/powi lowering code
3180 in the cse_sincos pass) could be revised to provide it through
3181 dataflow propagation. */
3183 def_stmt = SSA_NAME_DEF_STMT (val);
3185 if (is_gimple_assign (def_stmt))
3187 tree op0, op1;
3189 /* See fold-const.c:tree_expr_nonnegative_p for additional
3190 cases that could be handled with recursion. */
3192 switch (gimple_assign_rhs_code (def_stmt))
3194 case ABS_EXPR:
3195 /* Always true for floating-point operands. */
3196 return true;
3198 case MULT_EXPR:
3199 /* True if the two operands are identical (since we are
3200 restricted to floating-point inputs). */
3201 op0 = gimple_assign_rhs1 (def_stmt);
3202 op1 = gimple_assign_rhs2 (def_stmt);
3204 if (op0 == op1
3205 || operand_equal_p (op0, op1, 0))
3206 return true;
3208 default:
3209 return false;
3212 else if (is_gimple_call (def_stmt))
3214 tree fndecl = gimple_call_fndecl (def_stmt);
3215 if (fndecl
3216 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3218 tree arg1;
3220 switch (DECL_FUNCTION_CODE (fndecl))
3222 CASE_FLT_FN (BUILT_IN_ACOS):
3223 CASE_FLT_FN (BUILT_IN_ACOSH):
3224 CASE_FLT_FN (BUILT_IN_CABS):
3225 CASE_FLT_FN (BUILT_IN_COSH):
3226 CASE_FLT_FN (BUILT_IN_ERFC):
3227 CASE_FLT_FN (BUILT_IN_EXP):
3228 CASE_FLT_FN (BUILT_IN_EXP10):
3229 CASE_FLT_FN (BUILT_IN_EXP2):
3230 CASE_FLT_FN (BUILT_IN_FABS):
3231 CASE_FLT_FN (BUILT_IN_FDIM):
3232 CASE_FLT_FN (BUILT_IN_HYPOT):
3233 CASE_FLT_FN (BUILT_IN_POW10):
3234 return true;
3236 CASE_FLT_FN (BUILT_IN_SQRT):
3237 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3238 nonnegative inputs. */
3239 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3240 return true;
3242 break;
3244 CASE_FLT_FN (BUILT_IN_POWI):
3245 /* True if the second argument is an even integer. */
3246 arg1 = gimple_call_arg (def_stmt, 1);
3248 if (TREE_CODE (arg1) == INTEGER_CST
3249 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3250 return true;
3252 break;
3254 CASE_FLT_FN (BUILT_IN_POW):
3255 /* True if the second argument is an even integer-valued
3256 real. */
3257 arg1 = gimple_call_arg (def_stmt, 1);
3259 if (TREE_CODE (arg1) == REAL_CST)
3261 REAL_VALUE_TYPE c;
3262 HOST_WIDE_INT n;
3264 c = TREE_REAL_CST (arg1);
3265 n = real_to_integer (&c);
3267 if ((n & 1) == 0)
3269 REAL_VALUE_TYPE cint;
3270 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3271 if (real_identical (&c, &cint))
3272 return true;
3276 break;
3278 default:
3279 return false;
3284 return false;