PR middle-end/55401
[official-gcc.git] / gcc / gimple-fold.c
blob6c4a46fb10b674cef9f4aeeb6ed9b24c661cbdcd
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010, 2011, 2012 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "dumpfile.h"
29 #include "tree-flow.h"
30 #include "tree-ssa-propagate.h"
31 #include "target.h"
32 #include "gimple-fold.h"
34 /* Return true when DECL can be referenced from current unit.
35 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
36 We can get declarations that are not possible to reference for various
37 reasons:
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
41 when they are keyed to other compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
44 set.
45 2) In WHOPR mode devirtualization might lead to reference
46 to method that was partitioned elsehwere.
47 In this case we have static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
49 declaring the body.
50 3) COMDAT functions referred by external vtables that
51 we devirtualize only during final copmilation stage.
52 At this time we already decided that we will not output
53 the function body and thus we can't reference the symbol
54 directly. */
56 static bool
57 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
59 struct varpool_node *vnode;
60 struct cgraph_node *node;
61 symtab_node snode;
63 /* We will later output the initializer, so we can refer to it.
64 So we are concerned only when DECL comes from initializer of
65 external var. */
66 if (!from_decl
67 || TREE_CODE (from_decl) != VAR_DECL
68 || !DECL_EXTERNAL (from_decl)
69 || (flag_ltrans
70 && symtab_get_node (from_decl)->symbol.in_other_partition))
71 return true;
72 /* We are concerned only about static/external vars and functions. */
73 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
74 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
75 return true;
76 /* Weakrefs have somewhat confusing DECL_EXTERNAL flag set; they
77 are always safe. */
78 if (DECL_EXTERNAL (decl)
79 && lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
80 return true;
81 /* We are folding reference from external vtable. The vtable may reffer
82 to a symbol keyed to other compilation unit. The other compilation
83 unit may be in separate DSO and the symbol may be hidden. */
84 if (DECL_VISIBILITY_SPECIFIED (decl)
85 && DECL_EXTERNAL (decl)
86 && (!(snode = symtab_get_node (decl)) || !snode->symbol.in_other_partition))
87 return false;
88 /* When function is public, we always can introduce new reference.
89 Exception are the COMDAT functions where introducing a direct
90 reference imply need to include function body in the curren tunit. */
91 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
92 return true;
93 /* We are not at ltrans stage; so don't worry about WHOPR.
94 Also when still gimplifying all referred comdat functions will be
95 produced.
97 As observed in PR20991 for already optimized out comdat virtual functions
98 it may be tempting to not necessarily give up because the copy will be
99 output elsewhere when corresponding vtable is output.
100 This is however not possible - ABI specify that COMDATs are output in
101 units where they are used and when the other unit was compiled with LTO
102 it is possible that vtable was kept public while the function itself
103 was privatized. */
104 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
105 return true;
107 /* OK we are seeing either COMDAT or static variable. In this case we must
108 check that the definition is still around so we can refer it. */
109 if (TREE_CODE (decl) == FUNCTION_DECL)
111 node = cgraph_get_node (decl);
112 /* Check that we still have function body and that we didn't took
113 the decision to eliminate offline copy of the function yet.
114 The second is important when devirtualization happens during final
115 compilation stage when making a new reference no longer makes callee
116 to be compiled. */
117 if (!node || !node->analyzed || node->global.inlined_to)
119 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
120 return false;
123 else if (TREE_CODE (decl) == VAR_DECL)
125 vnode = varpool_get_node (decl);
126 if (!vnode || !vnode->analyzed)
128 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
129 return false;
132 return true;
135 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
136 acceptable form for is_gimple_min_invariant.
137 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
139 tree
140 canonicalize_constructor_val (tree cval, tree from_decl)
142 tree orig_cval = cval;
143 STRIP_NOPS (cval);
144 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
145 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
147 tree ptr = TREE_OPERAND (cval, 0);
148 if (is_gimple_min_invariant (ptr))
149 cval = build1_loc (EXPR_LOCATION (cval),
150 ADDR_EXPR, TREE_TYPE (ptr),
151 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
152 ptr,
153 fold_convert (ptr_type_node,
154 TREE_OPERAND (cval, 1))));
156 if (TREE_CODE (cval) == ADDR_EXPR)
158 tree base = NULL_TREE;
159 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
161 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
162 if (base)
163 TREE_OPERAND (cval, 0) = base;
165 else
166 base = get_base_address (TREE_OPERAND (cval, 0));
167 if (!base)
168 return NULL_TREE;
170 if ((TREE_CODE (base) == VAR_DECL
171 || TREE_CODE (base) == FUNCTION_DECL)
172 && !can_refer_decl_in_current_unit_p (base, from_decl))
173 return NULL_TREE;
174 if (TREE_CODE (base) == VAR_DECL)
175 TREE_ADDRESSABLE (base) = 1;
176 else if (TREE_CODE (base) == FUNCTION_DECL)
178 /* Make sure we create a cgraph node for functions we'll reference.
179 They can be non-existent if the reference comes from an entry
180 of an external vtable for example. */
181 cgraph_get_create_node (base);
183 /* Fixup types in global initializers. */
184 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
185 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
187 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
188 cval = fold_convert (TREE_TYPE (orig_cval), cval);
189 return cval;
191 return orig_cval;
194 /* If SYM is a constant variable with known value, return the value.
195 NULL_TREE is returned otherwise. */
197 tree
198 get_symbol_constant_value (tree sym)
200 if (const_value_known_p (sym))
202 tree val = DECL_INITIAL (sym);
203 if (val)
205 val = canonicalize_constructor_val (val, sym);
206 if (val && is_gimple_min_invariant (val))
207 return val;
208 else
209 return NULL_TREE;
211 /* Variables declared 'const' without an initializer
212 have zero as the initializer if they may not be
213 overridden at link or run time. */
214 if (!val
215 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
216 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
217 return build_zero_cst (TREE_TYPE (sym));
220 return NULL_TREE;
225 /* Subroutine of fold_stmt. We perform several simplifications of the
226 memory reference tree EXPR and make sure to re-gimplify them properly
227 after propagation of constant addresses. IS_LHS is true if the
228 reference is supposed to be an lvalue. */
230 static tree
231 maybe_fold_reference (tree expr, bool is_lhs)
233 tree *t = &expr;
234 tree result;
236 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
237 || TREE_CODE (expr) == REALPART_EXPR
238 || TREE_CODE (expr) == IMAGPART_EXPR)
239 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
240 return fold_unary_loc (EXPR_LOCATION (expr),
241 TREE_CODE (expr),
242 TREE_TYPE (expr),
243 TREE_OPERAND (expr, 0));
244 else if (TREE_CODE (expr) == BIT_FIELD_REF
245 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
246 return fold_ternary_loc (EXPR_LOCATION (expr),
247 TREE_CODE (expr),
248 TREE_TYPE (expr),
249 TREE_OPERAND (expr, 0),
250 TREE_OPERAND (expr, 1),
251 TREE_OPERAND (expr, 2));
253 while (handled_component_p (*t))
254 t = &TREE_OPERAND (*t, 0);
256 /* Canonicalize MEM_REFs invariant address operand. Do this first
257 to avoid feeding non-canonical MEM_REFs elsewhere. */
258 if (TREE_CODE (*t) == MEM_REF
259 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
261 bool volatile_p = TREE_THIS_VOLATILE (*t);
262 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
263 TREE_OPERAND (*t, 0),
264 TREE_OPERAND (*t, 1));
265 if (tem)
267 TREE_THIS_VOLATILE (tem) = volatile_p;
268 *t = tem;
269 tem = maybe_fold_reference (expr, is_lhs);
270 if (tem)
271 return tem;
272 return expr;
276 if (!is_lhs
277 && (result = fold_const_aggregate_ref (expr))
278 && is_gimple_min_invariant (result))
279 return result;
281 /* Fold back MEM_REFs to reference trees. */
282 if (TREE_CODE (*t) == MEM_REF
283 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
284 && integer_zerop (TREE_OPERAND (*t, 1))
285 && (TREE_THIS_VOLATILE (*t)
286 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
287 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
288 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
289 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
290 /* We have to look out here to not drop a required conversion
291 from the rhs to the lhs if is_lhs, but we don't have the
292 rhs here to verify that. Thus require strict type
293 compatibility. */
294 && types_compatible_p (TREE_TYPE (*t),
295 TREE_TYPE (TREE_OPERAND
296 (TREE_OPERAND (*t, 0), 0))))
298 tree tem;
299 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
300 tem = maybe_fold_reference (expr, is_lhs);
301 if (tem)
302 return tem;
303 return expr;
305 else if (TREE_CODE (*t) == TARGET_MEM_REF)
307 tree tem = maybe_fold_tmr (*t);
308 if (tem)
310 *t = tem;
311 tem = maybe_fold_reference (expr, is_lhs);
312 if (tem)
313 return tem;
314 return expr;
318 return NULL_TREE;
322 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
323 replacement rhs for the statement or NULL_TREE if no simplification
324 could be made. It is assumed that the operands have been previously
325 folded. */
327 static tree
328 fold_gimple_assign (gimple_stmt_iterator *si)
330 gimple stmt = gsi_stmt (*si);
331 enum tree_code subcode = gimple_assign_rhs_code (stmt);
332 location_t loc = gimple_location (stmt);
334 tree result = NULL_TREE;
336 switch (get_gimple_rhs_class (subcode))
338 case GIMPLE_SINGLE_RHS:
340 tree rhs = gimple_assign_rhs1 (stmt);
342 if (REFERENCE_CLASS_P (rhs))
343 return maybe_fold_reference (rhs, false);
345 else if (TREE_CODE (rhs) == ADDR_EXPR)
347 tree ref = TREE_OPERAND (rhs, 0);
348 tree tem = maybe_fold_reference (ref, true);
349 if (tem
350 && TREE_CODE (tem) == MEM_REF
351 && integer_zerop (TREE_OPERAND (tem, 1)))
352 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
353 else if (tem)
354 result = fold_convert (TREE_TYPE (rhs),
355 build_fold_addr_expr_loc (loc, tem));
356 else if (TREE_CODE (ref) == MEM_REF
357 && integer_zerop (TREE_OPERAND (ref, 1)))
358 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
361 else if (TREE_CODE (rhs) == CONSTRUCTOR
362 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
363 && (CONSTRUCTOR_NELTS (rhs)
364 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
366 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
367 unsigned i;
368 tree val;
370 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
371 if (TREE_CODE (val) != INTEGER_CST
372 && TREE_CODE (val) != REAL_CST
373 && TREE_CODE (val) != FIXED_CST)
374 return NULL_TREE;
376 return build_vector_from_ctor (TREE_TYPE (rhs),
377 CONSTRUCTOR_ELTS (rhs));
380 else if (DECL_P (rhs))
381 return unshare_expr (get_symbol_constant_value (rhs));
383 /* If we couldn't fold the RHS, hand over to the generic
384 fold routines. */
385 if (result == NULL_TREE)
386 result = fold (rhs);
388 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
389 that may have been added by fold, and "useless" type
390 conversions that might now be apparent due to propagation. */
391 STRIP_USELESS_TYPE_CONVERSION (result);
393 if (result != rhs && valid_gimple_rhs_p (result))
394 return result;
396 return NULL_TREE;
398 break;
400 case GIMPLE_UNARY_RHS:
402 tree rhs = gimple_assign_rhs1 (stmt);
404 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
405 if (result)
407 /* If the operation was a conversion do _not_ mark a
408 resulting constant with TREE_OVERFLOW if the original
409 constant was not. These conversions have implementation
410 defined behavior and retaining the TREE_OVERFLOW flag
411 here would confuse later passes such as VRP. */
412 if (CONVERT_EXPR_CODE_P (subcode)
413 && TREE_CODE (result) == INTEGER_CST
414 && TREE_CODE (rhs) == INTEGER_CST)
415 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
417 STRIP_USELESS_TYPE_CONVERSION (result);
418 if (valid_gimple_rhs_p (result))
419 return result;
422 break;
424 case GIMPLE_BINARY_RHS:
425 /* Try to canonicalize for boolean-typed X the comparisons
426 X == 0, X == 1, X != 0, and X != 1. */
427 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
428 || gimple_assign_rhs_code (stmt) == NE_EXPR)
430 tree lhs = gimple_assign_lhs (stmt);
431 tree op1 = gimple_assign_rhs1 (stmt);
432 tree op2 = gimple_assign_rhs2 (stmt);
433 tree type = TREE_TYPE (op1);
435 /* Check whether the comparison operands are of the same boolean
436 type as the result type is.
437 Check that second operand is an integer-constant with value
438 one or zero. */
439 if (TREE_CODE (op2) == INTEGER_CST
440 && (integer_zerop (op2) || integer_onep (op2))
441 && useless_type_conversion_p (TREE_TYPE (lhs), type))
443 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
444 bool is_logical_not = false;
446 /* X == 0 and X != 1 is a logical-not.of X
447 X == 1 and X != 0 is X */
448 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
449 || (cmp_code == NE_EXPR && integer_onep (op2)))
450 is_logical_not = true;
452 if (is_logical_not == false)
453 result = op1;
454 /* Only for one-bit precision typed X the transformation
455 !X -> ~X is valied. */
456 else if (TYPE_PRECISION (type) == 1)
457 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
458 type, op1);
459 /* Otherwise we use !X -> X ^ 1. */
460 else
461 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
462 type, op1, build_int_cst (type, 1));
467 if (!result)
468 result = fold_binary_loc (loc, subcode,
469 TREE_TYPE (gimple_assign_lhs (stmt)),
470 gimple_assign_rhs1 (stmt),
471 gimple_assign_rhs2 (stmt));
473 if (result)
475 STRIP_USELESS_TYPE_CONVERSION (result);
476 if (valid_gimple_rhs_p (result))
477 return result;
479 break;
481 case GIMPLE_TERNARY_RHS:
482 /* Try to fold a conditional expression. */
483 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
485 tree op0 = gimple_assign_rhs1 (stmt);
486 tree tem;
487 bool set = false;
488 location_t cond_loc = gimple_location (stmt);
490 if (COMPARISON_CLASS_P (op0))
492 fold_defer_overflow_warnings ();
493 tem = fold_binary_loc (cond_loc,
494 TREE_CODE (op0), TREE_TYPE (op0),
495 TREE_OPERAND (op0, 0),
496 TREE_OPERAND (op0, 1));
497 /* This is actually a conditional expression, not a GIMPLE
498 conditional statement, however, the valid_gimple_rhs_p
499 test still applies. */
500 set = (tem && is_gimple_condexpr (tem)
501 && valid_gimple_rhs_p (tem));
502 fold_undefer_overflow_warnings (set, stmt, 0);
504 else if (is_gimple_min_invariant (op0))
506 tem = op0;
507 set = true;
509 else
510 return NULL_TREE;
512 if (set)
513 result = fold_build3_loc (cond_loc, COND_EXPR,
514 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
515 gimple_assign_rhs2 (stmt),
516 gimple_assign_rhs3 (stmt));
519 if (!result)
520 result = fold_ternary_loc (loc, subcode,
521 TREE_TYPE (gimple_assign_lhs (stmt)),
522 gimple_assign_rhs1 (stmt),
523 gimple_assign_rhs2 (stmt),
524 gimple_assign_rhs3 (stmt));
526 if (result)
528 STRIP_USELESS_TYPE_CONVERSION (result);
529 if (valid_gimple_rhs_p (result))
530 return result;
532 break;
534 case GIMPLE_INVALID_RHS:
535 gcc_unreachable ();
538 return NULL_TREE;
541 /* Attempt to fold a conditional statement. Return true if any changes were
542 made. We only attempt to fold the condition expression, and do not perform
543 any transformation that would require alteration of the cfg. It is
544 assumed that the operands have been previously folded. */
546 static bool
547 fold_gimple_cond (gimple stmt)
549 tree result = fold_binary_loc (gimple_location (stmt),
550 gimple_cond_code (stmt),
551 boolean_type_node,
552 gimple_cond_lhs (stmt),
553 gimple_cond_rhs (stmt));
555 if (result)
557 STRIP_USELESS_TYPE_CONVERSION (result);
558 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
560 gimple_cond_set_condition_from_tree (stmt, result);
561 return true;
565 return false;
568 /* Convert EXPR into a GIMPLE value suitable for substitution on the
569 RHS of an assignment. Insert the necessary statements before
570 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
571 is replaced. If the call is expected to produces a result, then it
572 is replaced by an assignment of the new RHS to the result variable.
573 If the result is to be ignored, then the call is replaced by a
574 GIMPLE_NOP. A proper VDEF chain is retained by making the first
575 VUSE and the last VDEF of the whole sequence be the same as the replaced
576 statement and using new SSA names for stores in between. */
578 void
579 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
581 tree lhs;
582 gimple stmt, new_stmt;
583 gimple_stmt_iterator i;
584 gimple_seq stmts = NULL;
585 struct gimplify_ctx gctx;
586 gimple laststore;
587 tree reaching_vuse;
589 stmt = gsi_stmt (*si_p);
591 gcc_assert (is_gimple_call (stmt));
593 push_gimplify_context (&gctx);
594 gctx.into_ssa = gimple_in_ssa_p (cfun);
596 lhs = gimple_call_lhs (stmt);
597 if (lhs == NULL_TREE)
599 gimplify_and_add (expr, &stmts);
600 /* We can end up with folding a memcpy of an empty class assignment
601 which gets optimized away by C++ gimplification. */
602 if (gimple_seq_empty_p (stmts))
604 pop_gimplify_context (NULL);
605 if (gimple_in_ssa_p (cfun))
607 unlink_stmt_vdef (stmt);
608 release_defs (stmt);
610 gsi_replace (si_p, gimple_build_nop (), true);
611 return;
614 else
616 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
617 new_stmt = gimple_build_assign (lhs, tmp);
618 i = gsi_last (stmts);
619 gsi_insert_after_without_update (&i, new_stmt,
620 GSI_CONTINUE_LINKING);
623 pop_gimplify_context (NULL);
625 if (gimple_has_location (stmt))
626 annotate_all_with_location (stmts, gimple_location (stmt));
628 /* First iterate over the replacement statements backward, assigning
629 virtual operands to their defining statements. */
630 laststore = NULL;
631 for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
633 new_stmt = gsi_stmt (i);
634 if ((gimple_assign_single_p (new_stmt)
635 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
636 || (is_gimple_call (new_stmt)
637 && (gimple_call_flags (new_stmt)
638 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
640 tree vdef;
641 if (!laststore)
642 vdef = gimple_vdef (stmt);
643 else
644 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
645 gimple_set_vdef (new_stmt, vdef);
646 if (vdef && TREE_CODE (vdef) == SSA_NAME)
647 SSA_NAME_DEF_STMT (vdef) = new_stmt;
648 laststore = new_stmt;
652 /* Second iterate over the statements forward, assigning virtual
653 operands to their uses. */
654 reaching_vuse = gimple_vuse (stmt);
655 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
657 new_stmt = gsi_stmt (i);
658 /* If the new statement possibly has a VUSE, update it with exact SSA
659 name we know will reach this one. */
660 if (gimple_has_mem_ops (new_stmt))
661 gimple_set_vuse (new_stmt, reaching_vuse);
662 gimple_set_modified (new_stmt, true);
663 if (gimple_vdef (new_stmt))
664 reaching_vuse = gimple_vdef (new_stmt);
667 /* If the new sequence does not do a store release the virtual
668 definition of the original statement. */
669 if (reaching_vuse
670 && reaching_vuse == gimple_vuse (stmt))
672 tree vdef = gimple_vdef (stmt);
673 if (vdef
674 && TREE_CODE (vdef) == SSA_NAME)
676 unlink_stmt_vdef (stmt);
677 release_ssa_name (vdef);
681 /* Finally replace the original statement with the sequence. */
682 gsi_replace_with_seq (si_p, stmts, false);
685 /* Return the string length, maximum string length or maximum value of
686 ARG in LENGTH.
687 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
688 is not NULL and, for TYPE == 0, its value is not equal to the length
689 we determine or if we are unable to determine the length or value,
690 return false. VISITED is a bitmap of visited variables.
691 TYPE is 0 if string length should be returned, 1 for maximum string
692 length and 2 for maximum value ARG can have. */
694 static bool
695 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
697 tree var, val;
698 gimple def_stmt;
700 if (TREE_CODE (arg) != SSA_NAME)
702 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
703 if (TREE_CODE (arg) == ADDR_EXPR
704 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
705 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
707 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
708 if (TREE_CODE (aop0) == INDIRECT_REF
709 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
710 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
711 length, visited, type);
714 if (type == 2)
716 val = arg;
717 if (TREE_CODE (val) != INTEGER_CST
718 || tree_int_cst_sgn (val) < 0)
719 return false;
721 else
722 val = c_strlen (arg, 1);
723 if (!val)
724 return false;
726 if (*length)
728 if (type > 0)
730 if (TREE_CODE (*length) != INTEGER_CST
731 || TREE_CODE (val) != INTEGER_CST)
732 return false;
734 if (tree_int_cst_lt (*length, val))
735 *length = val;
736 return true;
738 else if (simple_cst_equal (val, *length) != 1)
739 return false;
742 *length = val;
743 return true;
746 /* If ARG is registered for SSA update we cannot look at its defining
747 statement. */
748 if (name_registered_for_update_p (arg))
749 return false;
751 /* If we were already here, break the infinite cycle. */
752 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
753 return true;
755 var = arg;
756 def_stmt = SSA_NAME_DEF_STMT (var);
758 switch (gimple_code (def_stmt))
760 case GIMPLE_ASSIGN:
761 /* The RHS of the statement defining VAR must either have a
762 constant length or come from another SSA_NAME with a constant
763 length. */
764 if (gimple_assign_single_p (def_stmt)
765 || gimple_assign_unary_nop_p (def_stmt))
767 tree rhs = gimple_assign_rhs1 (def_stmt);
768 return get_maxval_strlen (rhs, length, visited, type);
770 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
772 tree op2 = gimple_assign_rhs2 (def_stmt);
773 tree op3 = gimple_assign_rhs3 (def_stmt);
774 return get_maxval_strlen (op2, length, visited, type)
775 && get_maxval_strlen (op3, length, visited, type);
777 return false;
779 case GIMPLE_PHI:
781 /* All the arguments of the PHI node must have the same constant
782 length. */
783 unsigned i;
785 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
787 tree arg = gimple_phi_arg (def_stmt, i)->def;
789 /* If this PHI has itself as an argument, we cannot
790 determine the string length of this argument. However,
791 if we can find a constant string length for the other
792 PHI args then we can still be sure that this is a
793 constant string length. So be optimistic and just
794 continue with the next argument. */
795 if (arg == gimple_phi_result (def_stmt))
796 continue;
798 if (!get_maxval_strlen (arg, length, visited, type))
799 return false;
802 return true;
804 default:
805 return false;
810 /* Fold builtin call in statement STMT. Returns a simplified tree.
811 We may return a non-constant expression, including another call
812 to a different function and with different arguments, e.g.,
813 substituting memcpy for strcpy when the string length is known.
814 Note that some builtins expand into inline code that may not
815 be valid in GIMPLE. Callers must take care. */
817 tree
818 gimple_fold_builtin (gimple stmt)
820 tree result, val[3];
821 tree callee, a;
822 int arg_idx, type;
823 bitmap visited;
824 bool ignore;
825 int nargs;
826 location_t loc = gimple_location (stmt);
828 gcc_assert (is_gimple_call (stmt));
830 ignore = (gimple_call_lhs (stmt) == NULL);
832 /* First try the generic builtin folder. If that succeeds, return the
833 result directly. */
834 result = fold_call_stmt (stmt, ignore);
835 if (result)
837 if (ignore)
838 STRIP_NOPS (result);
839 return result;
842 /* Ignore MD builtins. */
843 callee = gimple_call_fndecl (stmt);
844 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
845 return NULL_TREE;
847 /* Give up for always_inline inline builtins until they are
848 inlined. */
849 if (avoid_folding_inline_builtin (callee))
850 return NULL_TREE;
852 /* If the builtin could not be folded, and it has no argument list,
853 we're done. */
854 nargs = gimple_call_num_args (stmt);
855 if (nargs == 0)
856 return NULL_TREE;
858 /* Limit the work only for builtins we know how to simplify. */
859 switch (DECL_FUNCTION_CODE (callee))
861 case BUILT_IN_STRLEN:
862 case BUILT_IN_FPUTS:
863 case BUILT_IN_FPUTS_UNLOCKED:
864 arg_idx = 0;
865 type = 0;
866 break;
867 case BUILT_IN_STRCPY:
868 case BUILT_IN_STRNCPY:
869 arg_idx = 1;
870 type = 0;
871 break;
872 case BUILT_IN_MEMCPY_CHK:
873 case BUILT_IN_MEMPCPY_CHK:
874 case BUILT_IN_MEMMOVE_CHK:
875 case BUILT_IN_MEMSET_CHK:
876 case BUILT_IN_STRNCPY_CHK:
877 case BUILT_IN_STPNCPY_CHK:
878 arg_idx = 2;
879 type = 2;
880 break;
881 case BUILT_IN_STRCPY_CHK:
882 case BUILT_IN_STPCPY_CHK:
883 arg_idx = 1;
884 type = 1;
885 break;
886 case BUILT_IN_SNPRINTF_CHK:
887 case BUILT_IN_VSNPRINTF_CHK:
888 arg_idx = 1;
889 type = 2;
890 break;
891 default:
892 return NULL_TREE;
895 if (arg_idx >= nargs)
896 return NULL_TREE;
898 /* Try to use the dataflow information gathered by the CCP process. */
899 visited = BITMAP_ALLOC (NULL);
900 bitmap_clear (visited);
902 memset (val, 0, sizeof (val));
903 a = gimple_call_arg (stmt, arg_idx);
904 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
905 val[arg_idx] = NULL_TREE;
907 BITMAP_FREE (visited);
909 result = NULL_TREE;
910 switch (DECL_FUNCTION_CODE (callee))
912 case BUILT_IN_STRLEN:
913 if (val[0] && nargs == 1)
915 tree new_val =
916 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
918 /* If the result is not a valid gimple value, or not a cast
919 of a valid gimple value, then we cannot use the result. */
920 if (is_gimple_val (new_val)
921 || (CONVERT_EXPR_P (new_val)
922 && is_gimple_val (TREE_OPERAND (new_val, 0))))
923 return new_val;
925 break;
927 case BUILT_IN_STRCPY:
928 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
929 result = fold_builtin_strcpy (loc, callee,
930 gimple_call_arg (stmt, 0),
931 gimple_call_arg (stmt, 1),
932 val[1]);
933 break;
935 case BUILT_IN_STRNCPY:
936 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
937 result = fold_builtin_strncpy (loc, callee,
938 gimple_call_arg (stmt, 0),
939 gimple_call_arg (stmt, 1),
940 gimple_call_arg (stmt, 2),
941 val[1]);
942 break;
944 case BUILT_IN_FPUTS:
945 if (nargs == 2)
946 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
947 gimple_call_arg (stmt, 1),
948 ignore, false, val[0]);
949 break;
951 case BUILT_IN_FPUTS_UNLOCKED:
952 if (nargs == 2)
953 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
954 gimple_call_arg (stmt, 1),
955 ignore, true, val[0]);
956 break;
958 case BUILT_IN_MEMCPY_CHK:
959 case BUILT_IN_MEMPCPY_CHK:
960 case BUILT_IN_MEMMOVE_CHK:
961 case BUILT_IN_MEMSET_CHK:
962 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
963 result = fold_builtin_memory_chk (loc, callee,
964 gimple_call_arg (stmt, 0),
965 gimple_call_arg (stmt, 1),
966 gimple_call_arg (stmt, 2),
967 gimple_call_arg (stmt, 3),
968 val[2], ignore,
969 DECL_FUNCTION_CODE (callee));
970 break;
972 case BUILT_IN_STRCPY_CHK:
973 case BUILT_IN_STPCPY_CHK:
974 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
975 result = fold_builtin_stxcpy_chk (loc, callee,
976 gimple_call_arg (stmt, 0),
977 gimple_call_arg (stmt, 1),
978 gimple_call_arg (stmt, 2),
979 val[1], ignore,
980 DECL_FUNCTION_CODE (callee));
981 break;
983 case BUILT_IN_STRNCPY_CHK:
984 case BUILT_IN_STPNCPY_CHK:
985 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
986 result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
987 gimple_call_arg (stmt, 1),
988 gimple_call_arg (stmt, 2),
989 gimple_call_arg (stmt, 3),
990 val[2], ignore,
991 DECL_FUNCTION_CODE (callee));
992 break;
994 case BUILT_IN_SNPRINTF_CHK:
995 case BUILT_IN_VSNPRINTF_CHK:
996 if (val[1] && is_gimple_val (val[1]))
997 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
998 DECL_FUNCTION_CODE (callee));
999 break;
1001 default:
1002 gcc_unreachable ();
1005 if (result && ignore)
1006 result = fold_ignored_result (result);
1007 return result;
1011 /* Return a binfo to be used for devirtualization of calls based on an object
1012 represented by a declaration (i.e. a global or automatically allocated one)
1013 or NULL if it cannot be found or is not safe. CST is expected to be an
1014 ADDR_EXPR of such object or the function will return NULL. Currently it is
1015 safe to use such binfo only if it has no base binfo (i.e. no ancestors). */
1017 tree
1018 gimple_extract_devirt_binfo_from_cst (tree cst)
1020 HOST_WIDE_INT offset, size, max_size;
1021 tree base, type, expected_type, binfo;
1022 bool last_artificial = false;
1024 if (!flag_devirtualize
1025 || TREE_CODE (cst) != ADDR_EXPR
1026 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
1027 return NULL_TREE;
1029 cst = TREE_OPERAND (cst, 0);
1030 expected_type = TREE_TYPE (cst);
1031 base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
1032 type = TREE_TYPE (base);
1033 if (!DECL_P (base)
1034 || max_size == -1
1035 || max_size != size
1036 || TREE_CODE (type) != RECORD_TYPE)
1037 return NULL_TREE;
1039 /* Find the sub-object the constant actually refers to and mark whether it is
1040 an artificial one (as opposed to a user-defined one). */
1041 while (true)
1043 HOST_WIDE_INT pos, size;
1044 tree fld;
1046 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
1047 break;
1048 if (offset < 0)
1049 return NULL_TREE;
1051 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1053 if (TREE_CODE (fld) != FIELD_DECL)
1054 continue;
1056 pos = int_bit_position (fld);
1057 size = tree_low_cst (DECL_SIZE (fld), 1);
1058 if (pos <= offset && (pos + size) > offset)
1059 break;
1061 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
1062 return NULL_TREE;
1064 last_artificial = DECL_ARTIFICIAL (fld);
1065 type = TREE_TYPE (fld);
1066 offset -= pos;
1068 /* Artificial sub-objects are ancestors, we do not want to use them for
1069 devirtualization, at least not here. */
1070 if (last_artificial)
1071 return NULL_TREE;
1072 binfo = TYPE_BINFO (type);
1073 if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
1074 return NULL_TREE;
1075 else
1076 return binfo;
1079 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1080 The statement may be replaced by another statement, e.g., if the call
1081 simplifies to a constant value. Return true if any changes were made.
1082 It is assumed that the operands have been previously folded. */
1084 static bool
1085 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1087 gimple stmt = gsi_stmt (*gsi);
1088 tree callee;
1089 bool changed = false;
1090 unsigned i;
1092 /* Fold *& in call arguments. */
1093 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1094 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1096 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1097 if (tmp)
1099 gimple_call_set_arg (stmt, i, tmp);
1100 changed = true;
1104 /* Check for virtual calls that became direct calls. */
1105 callee = gimple_call_fn (stmt);
1106 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1108 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1110 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1111 changed = true;
1113 else
1115 tree obj = OBJ_TYPE_REF_OBJECT (callee);
1116 tree binfo = gimple_extract_devirt_binfo_from_cst (obj);
1117 if (binfo)
1119 HOST_WIDE_INT token
1120 = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1121 tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
1122 if (fndecl)
1124 gimple_call_set_fndecl (stmt, fndecl);
1125 changed = true;
1131 if (inplace)
1132 return changed;
1134 /* Check for builtins that CCP can handle using information not
1135 available in the generic fold routines. */
1136 callee = gimple_call_fndecl (stmt);
1137 if (callee && DECL_BUILT_IN (callee))
1139 tree result = gimple_fold_builtin (stmt);
1140 if (result)
1142 if (!update_call_from_tree (gsi, result))
1143 gimplify_and_update_call_from_tree (gsi, result);
1144 changed = true;
1148 return changed;
1151 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1152 distinguishes both cases. */
1154 static bool
1155 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1157 bool changed = false;
1158 gimple stmt = gsi_stmt (*gsi);
1159 unsigned i;
1160 gimple_stmt_iterator gsinext = *gsi;
1161 gimple next_stmt;
1163 gsi_next (&gsinext);
1164 next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);
1166 /* Fold the main computation performed by the statement. */
1167 switch (gimple_code (stmt))
1169 case GIMPLE_ASSIGN:
1171 unsigned old_num_ops = gimple_num_ops (stmt);
1172 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1173 tree lhs = gimple_assign_lhs (stmt);
1174 tree new_rhs;
1175 /* First canonicalize operand order. This avoids building new
1176 trees if this is the only thing fold would later do. */
1177 if ((commutative_tree_code (subcode)
1178 || commutative_ternary_tree_code (subcode))
1179 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1180 gimple_assign_rhs2 (stmt), false))
1182 tree tem = gimple_assign_rhs1 (stmt);
1183 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1184 gimple_assign_set_rhs2 (stmt, tem);
1185 changed = true;
1187 new_rhs = fold_gimple_assign (gsi);
1188 if (new_rhs
1189 && !useless_type_conversion_p (TREE_TYPE (lhs),
1190 TREE_TYPE (new_rhs)))
1191 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1192 if (new_rhs
1193 && (!inplace
1194 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1196 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1197 changed = true;
1199 break;
1202 case GIMPLE_COND:
1203 changed |= fold_gimple_cond (stmt);
1204 break;
1206 case GIMPLE_CALL:
1207 changed |= gimple_fold_call (gsi, inplace);
1208 break;
1210 case GIMPLE_ASM:
1211 /* Fold *& in asm operands. */
1213 size_t noutputs;
1214 const char **oconstraints;
1215 const char *constraint;
1216 bool allows_mem, allows_reg;
1218 noutputs = gimple_asm_noutputs (stmt);
1219 oconstraints = XALLOCAVEC (const char *, noutputs);
1221 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1223 tree link = gimple_asm_output_op (stmt, i);
1224 tree op = TREE_VALUE (link);
1225 oconstraints[i]
1226 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1227 if (REFERENCE_CLASS_P (op)
1228 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1230 TREE_VALUE (link) = op;
1231 changed = true;
1234 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1236 tree link = gimple_asm_input_op (stmt, i);
1237 tree op = TREE_VALUE (link);
1238 constraint
1239 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1240 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1241 oconstraints, &allows_mem, &allows_reg);
1242 if (REFERENCE_CLASS_P (op)
1243 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1244 != NULL_TREE)
1246 TREE_VALUE (link) = op;
1247 changed = true;
1251 break;
1253 case GIMPLE_DEBUG:
1254 if (gimple_debug_bind_p (stmt))
1256 tree val = gimple_debug_bind_get_value (stmt);
1257 if (val
1258 && REFERENCE_CLASS_P (val))
1260 tree tem = maybe_fold_reference (val, false);
1261 if (tem)
1263 gimple_debug_bind_set_value (stmt, tem);
1264 changed = true;
1267 else if (val
1268 && TREE_CODE (val) == ADDR_EXPR)
1270 tree ref = TREE_OPERAND (val, 0);
1271 tree tem = maybe_fold_reference (ref, false);
1272 if (tem)
1274 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
1275 gimple_debug_bind_set_value (stmt, tem);
1276 changed = true;
1280 break;
1282 default:;
1285 stmt = gsi_stmt (*gsi);
1287 /* Fold *& on the lhs. Don't do this if stmt folded into nothing,
1288 as we'd changing the next stmt. */
1289 if (gimple_has_lhs (stmt) && stmt != next_stmt)
1291 tree lhs = gimple_get_lhs (stmt);
1292 if (lhs && REFERENCE_CLASS_P (lhs))
1294 tree new_lhs = maybe_fold_reference (lhs, true);
1295 if (new_lhs)
1297 gimple_set_lhs (stmt, new_lhs);
1298 changed = true;
1303 return changed;
1306 /* Fold the statement pointed to by GSI. In some cases, this function may
1307 replace the whole statement with a new one. Returns true iff folding
1308 makes any changes.
1309 The statement pointed to by GSI should be in valid gimple form but may
1310 be in unfolded state as resulting from for example constant propagation
1311 which can produce *&x = 0. */
1313 bool
1314 fold_stmt (gimple_stmt_iterator *gsi)
1316 return fold_stmt_1 (gsi, false);
1319 /* Perform the minimal folding on statement *GSI. Only operations like
1320 *&x created by constant propagation are handled. The statement cannot
1321 be replaced with a new one. Return true if the statement was
1322 changed, false otherwise.
1323 The statement *GSI should be in valid gimple form but may
1324 be in unfolded state as resulting from for example constant propagation
1325 which can produce *&x = 0. */
1327 bool
1328 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1330 gimple stmt = gsi_stmt (*gsi);
1331 bool changed = fold_stmt_1 (gsi, true);
1332 gcc_assert (gsi_stmt (*gsi) == stmt);
1333 return changed;
1336 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1337 if EXPR is null or we don't know how.
1338 If non-null, the result always has boolean type. */
1340 static tree
1341 canonicalize_bool (tree expr, bool invert)
1343 if (!expr)
1344 return NULL_TREE;
1345 else if (invert)
1347 if (integer_nonzerop (expr))
1348 return boolean_false_node;
1349 else if (integer_zerop (expr))
1350 return boolean_true_node;
1351 else if (TREE_CODE (expr) == SSA_NAME)
1352 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1353 build_int_cst (TREE_TYPE (expr), 0));
1354 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1355 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1356 boolean_type_node,
1357 TREE_OPERAND (expr, 0),
1358 TREE_OPERAND (expr, 1));
1359 else
1360 return NULL_TREE;
1362 else
1364 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1365 return expr;
1366 if (integer_nonzerop (expr))
1367 return boolean_true_node;
1368 else if (integer_zerop (expr))
1369 return boolean_false_node;
1370 else if (TREE_CODE (expr) == SSA_NAME)
1371 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1372 build_int_cst (TREE_TYPE (expr), 0));
1373 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1374 return fold_build2 (TREE_CODE (expr),
1375 boolean_type_node,
1376 TREE_OPERAND (expr, 0),
1377 TREE_OPERAND (expr, 1));
1378 else
1379 return NULL_TREE;
1383 /* Check to see if a boolean expression EXPR is logically equivalent to the
1384 comparison (OP1 CODE OP2). Check for various identities involving
1385 SSA_NAMEs. */
1387 static bool
1388 same_bool_comparison_p (const_tree expr, enum tree_code code,
1389 const_tree op1, const_tree op2)
1391 gimple s;
1393 /* The obvious case. */
1394 if (TREE_CODE (expr) == code
1395 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1396 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1397 return true;
1399 /* Check for comparing (name, name != 0) and the case where expr
1400 is an SSA_NAME with a definition matching the comparison. */
1401 if (TREE_CODE (expr) == SSA_NAME
1402 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1404 if (operand_equal_p (expr, op1, 0))
1405 return ((code == NE_EXPR && integer_zerop (op2))
1406 || (code == EQ_EXPR && integer_nonzerop (op2)));
1407 s = SSA_NAME_DEF_STMT (expr);
1408 if (is_gimple_assign (s)
1409 && gimple_assign_rhs_code (s) == code
1410 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1411 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1412 return true;
1415 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1416 of name is a comparison, recurse. */
1417 if (TREE_CODE (op1) == SSA_NAME
1418 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1420 s = SSA_NAME_DEF_STMT (op1);
1421 if (is_gimple_assign (s)
1422 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1424 enum tree_code c = gimple_assign_rhs_code (s);
1425 if ((c == NE_EXPR && integer_zerop (op2))
1426 || (c == EQ_EXPR && integer_nonzerop (op2)))
1427 return same_bool_comparison_p (expr, c,
1428 gimple_assign_rhs1 (s),
1429 gimple_assign_rhs2 (s));
1430 if ((c == EQ_EXPR && integer_zerop (op2))
1431 || (c == NE_EXPR && integer_nonzerop (op2)))
1432 return same_bool_comparison_p (expr,
1433 invert_tree_comparison (c, false),
1434 gimple_assign_rhs1 (s),
1435 gimple_assign_rhs2 (s));
1438 return false;
1441 /* Check to see if two boolean expressions OP1 and OP2 are logically
1442 equivalent. */
1444 static bool
1445 same_bool_result_p (const_tree op1, const_tree op2)
1447 /* Simple cases first. */
1448 if (operand_equal_p (op1, op2, 0))
1449 return true;
1451 /* Check the cases where at least one of the operands is a comparison.
1452 These are a bit smarter than operand_equal_p in that they apply some
1453 identifies on SSA_NAMEs. */
1454 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1455 && same_bool_comparison_p (op1, TREE_CODE (op2),
1456 TREE_OPERAND (op2, 0),
1457 TREE_OPERAND (op2, 1)))
1458 return true;
1459 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1460 && same_bool_comparison_p (op2, TREE_CODE (op1),
1461 TREE_OPERAND (op1, 0),
1462 TREE_OPERAND (op1, 1)))
1463 return true;
1465 /* Default case. */
1466 return false;
1469 /* Forward declarations for some mutually recursive functions. */
1471 static tree
1472 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1473 enum tree_code code2, tree op2a, tree op2b);
1474 static tree
1475 and_var_with_comparison (tree var, bool invert,
1476 enum tree_code code2, tree op2a, tree op2b);
1477 static tree
1478 and_var_with_comparison_1 (gimple stmt,
1479 enum tree_code code2, tree op2a, tree op2b);
1480 static tree
1481 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1482 enum tree_code code2, tree op2a, tree op2b);
1483 static tree
1484 or_var_with_comparison (tree var, bool invert,
1485 enum tree_code code2, tree op2a, tree op2b);
1486 static tree
1487 or_var_with_comparison_1 (gimple stmt,
1488 enum tree_code code2, tree op2a, tree op2b);
1490 /* Helper function for and_comparisons_1: try to simplify the AND of the
1491 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1492 If INVERT is true, invert the value of the VAR before doing the AND.
1493 Return NULL_EXPR if we can't simplify this to a single expression. */
1495 static tree
1496 and_var_with_comparison (tree var, bool invert,
1497 enum tree_code code2, tree op2a, tree op2b)
1499 tree t;
1500 gimple stmt = SSA_NAME_DEF_STMT (var);
1502 /* We can only deal with variables whose definitions are assignments. */
1503 if (!is_gimple_assign (stmt))
1504 return NULL_TREE;
1506 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1507 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1508 Then we only have to consider the simpler non-inverted cases. */
1509 if (invert)
1510 t = or_var_with_comparison_1 (stmt,
1511 invert_tree_comparison (code2, false),
1512 op2a, op2b);
1513 else
1514 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1515 return canonicalize_bool (t, invert);
1518 /* Try to simplify the AND of the ssa variable defined by the assignment
1519 STMT with the comparison specified by (OP2A CODE2 OP2B).
1520 Return NULL_EXPR if we can't simplify this to a single expression. */
1522 static tree
1523 and_var_with_comparison_1 (gimple stmt,
1524 enum tree_code code2, tree op2a, tree op2b)
1526 tree var = gimple_assign_lhs (stmt);
1527 tree true_test_var = NULL_TREE;
1528 tree false_test_var = NULL_TREE;
1529 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1531 /* Check for identities like (var AND (var == 0)) => false. */
1532 if (TREE_CODE (op2a) == SSA_NAME
1533 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1535 if ((code2 == NE_EXPR && integer_zerop (op2b))
1536 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1538 true_test_var = op2a;
1539 if (var == true_test_var)
1540 return var;
1542 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1543 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1545 false_test_var = op2a;
1546 if (var == false_test_var)
1547 return boolean_false_node;
1551 /* If the definition is a comparison, recurse on it. */
1552 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1554 tree t = and_comparisons_1 (innercode,
1555 gimple_assign_rhs1 (stmt),
1556 gimple_assign_rhs2 (stmt),
1557 code2,
1558 op2a,
1559 op2b);
1560 if (t)
1561 return t;
1564 /* If the definition is an AND or OR expression, we may be able to
1565 simplify by reassociating. */
1566 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1567 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1569 tree inner1 = gimple_assign_rhs1 (stmt);
1570 tree inner2 = gimple_assign_rhs2 (stmt);
1571 gimple s;
1572 tree t;
1573 tree partial = NULL_TREE;
1574 bool is_and = (innercode == BIT_AND_EXPR);
1576 /* Check for boolean identities that don't require recursive examination
1577 of inner1/inner2:
1578 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1579 inner1 AND (inner1 OR inner2) => inner1
1580 !inner1 AND (inner1 AND inner2) => false
1581 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1582 Likewise for similar cases involving inner2. */
1583 if (inner1 == true_test_var)
1584 return (is_and ? var : inner1);
1585 else if (inner2 == true_test_var)
1586 return (is_and ? var : inner2);
1587 else if (inner1 == false_test_var)
1588 return (is_and
1589 ? boolean_false_node
1590 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1591 else if (inner2 == false_test_var)
1592 return (is_and
1593 ? boolean_false_node
1594 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1596 /* Next, redistribute/reassociate the AND across the inner tests.
1597 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1598 if (TREE_CODE (inner1) == SSA_NAME
1599 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1600 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1601 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1602 gimple_assign_rhs1 (s),
1603 gimple_assign_rhs2 (s),
1604 code2, op2a, op2b)))
1606 /* Handle the AND case, where we are reassociating:
1607 (inner1 AND inner2) AND (op2a code2 op2b)
1608 => (t AND inner2)
1609 If the partial result t is a constant, we win. Otherwise
1610 continue on to try reassociating with the other inner test. */
1611 if (is_and)
1613 if (integer_onep (t))
1614 return inner2;
1615 else if (integer_zerop (t))
1616 return boolean_false_node;
1619 /* Handle the OR case, where we are redistributing:
1620 (inner1 OR inner2) AND (op2a code2 op2b)
1621 => (t OR (inner2 AND (op2a code2 op2b))) */
1622 else if (integer_onep (t))
1623 return boolean_true_node;
1625 /* Save partial result for later. */
1626 partial = t;
1629 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1630 if (TREE_CODE (inner2) == SSA_NAME
1631 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1632 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1633 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1634 gimple_assign_rhs1 (s),
1635 gimple_assign_rhs2 (s),
1636 code2, op2a, op2b)))
1638 /* Handle the AND case, where we are reassociating:
1639 (inner1 AND inner2) AND (op2a code2 op2b)
1640 => (inner1 AND t) */
1641 if (is_and)
1643 if (integer_onep (t))
1644 return inner1;
1645 else if (integer_zerop (t))
1646 return boolean_false_node;
1647 /* If both are the same, we can apply the identity
1648 (x AND x) == x. */
1649 else if (partial && same_bool_result_p (t, partial))
1650 return t;
1653 /* Handle the OR case. where we are redistributing:
1654 (inner1 OR inner2) AND (op2a code2 op2b)
1655 => (t OR (inner1 AND (op2a code2 op2b)))
1656 => (t OR partial) */
1657 else
1659 if (integer_onep (t))
1660 return boolean_true_node;
1661 else if (partial)
1663 /* We already got a simplification for the other
1664 operand to the redistributed OR expression. The
1665 interesting case is when at least one is false.
1666 Or, if both are the same, we can apply the identity
1667 (x OR x) == x. */
1668 if (integer_zerop (partial))
1669 return t;
1670 else if (integer_zerop (t))
1671 return partial;
1672 else if (same_bool_result_p (t, partial))
1673 return t;
1678 return NULL_TREE;
1681 /* Try to simplify the AND of two comparisons defined by
1682 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1683 If this can be done without constructing an intermediate value,
1684 return the resulting tree; otherwise NULL_TREE is returned.
1685 This function is deliberately asymmetric as it recurses on SSA_DEFs
1686 in the first comparison but not the second. */
1688 static tree
1689 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1690 enum tree_code code2, tree op2a, tree op2b)
1692 tree truth_type = truth_type_for (TREE_TYPE (op1a));
1694 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1695 if (operand_equal_p (op1a, op2a, 0)
1696 && operand_equal_p (op1b, op2b, 0))
1698 /* Result will be either NULL_TREE, or a combined comparison. */
1699 tree t = combine_comparisons (UNKNOWN_LOCATION,
1700 TRUTH_ANDIF_EXPR, code1, code2,
1701 truth_type, op1a, op1b);
1702 if (t)
1703 return t;
1706 /* Likewise the swapped case of the above. */
1707 if (operand_equal_p (op1a, op2b, 0)
1708 && operand_equal_p (op1b, op2a, 0))
1710 /* Result will be either NULL_TREE, or a combined comparison. */
1711 tree t = combine_comparisons (UNKNOWN_LOCATION,
1712 TRUTH_ANDIF_EXPR, code1,
1713 swap_tree_comparison (code2),
1714 truth_type, op1a, op1b);
1715 if (t)
1716 return t;
1719 /* If both comparisons are of the same value against constants, we might
1720 be able to merge them. */
1721 if (operand_equal_p (op1a, op2a, 0)
1722 && TREE_CODE (op1b) == INTEGER_CST
1723 && TREE_CODE (op2b) == INTEGER_CST)
1725 int cmp = tree_int_cst_compare (op1b, op2b);
1727 /* If we have (op1a == op1b), we should either be able to
1728 return that or FALSE, depending on whether the constant op1b
1729 also satisfies the other comparison against op2b. */
1730 if (code1 == EQ_EXPR)
1732 bool done = true;
1733 bool val;
1734 switch (code2)
1736 case EQ_EXPR: val = (cmp == 0); break;
1737 case NE_EXPR: val = (cmp != 0); break;
1738 case LT_EXPR: val = (cmp < 0); break;
1739 case GT_EXPR: val = (cmp > 0); break;
1740 case LE_EXPR: val = (cmp <= 0); break;
1741 case GE_EXPR: val = (cmp >= 0); break;
1742 default: done = false;
1744 if (done)
1746 if (val)
1747 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1748 else
1749 return boolean_false_node;
1752 /* Likewise if the second comparison is an == comparison. */
1753 else if (code2 == EQ_EXPR)
1755 bool done = true;
1756 bool val;
1757 switch (code1)
1759 case EQ_EXPR: val = (cmp == 0); break;
1760 case NE_EXPR: val = (cmp != 0); break;
1761 case LT_EXPR: val = (cmp > 0); break;
1762 case GT_EXPR: val = (cmp < 0); break;
1763 case LE_EXPR: val = (cmp >= 0); break;
1764 case GE_EXPR: val = (cmp <= 0); break;
1765 default: done = false;
1767 if (done)
1769 if (val)
1770 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1771 else
1772 return boolean_false_node;
1776 /* Same business with inequality tests. */
1777 else if (code1 == NE_EXPR)
1779 bool val;
1780 switch (code2)
1782 case EQ_EXPR: val = (cmp != 0); break;
1783 case NE_EXPR: val = (cmp == 0); break;
1784 case LT_EXPR: val = (cmp >= 0); break;
1785 case GT_EXPR: val = (cmp <= 0); break;
1786 case LE_EXPR: val = (cmp > 0); break;
1787 case GE_EXPR: val = (cmp < 0); break;
1788 default:
1789 val = false;
1791 if (val)
1792 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1794 else if (code2 == NE_EXPR)
1796 bool val;
1797 switch (code1)
1799 case EQ_EXPR: val = (cmp == 0); break;
1800 case NE_EXPR: val = (cmp != 0); break;
1801 case LT_EXPR: val = (cmp <= 0); break;
1802 case GT_EXPR: val = (cmp >= 0); break;
1803 case LE_EXPR: val = (cmp < 0); break;
1804 case GE_EXPR: val = (cmp > 0); break;
1805 default:
1806 val = false;
1808 if (val)
1809 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1812 /* Chose the more restrictive of two < or <= comparisons. */
1813 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1814 && (code2 == LT_EXPR || code2 == LE_EXPR))
1816 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1817 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1818 else
1819 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1822 /* Likewise chose the more restrictive of two > or >= comparisons. */
1823 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1824 && (code2 == GT_EXPR || code2 == GE_EXPR))
1826 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1827 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1828 else
1829 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1832 /* Check for singleton ranges. */
1833 else if (cmp == 0
1834 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1835 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1836 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1838 /* Check for disjoint ranges. */
1839 else if (cmp <= 0
1840 && (code1 == LT_EXPR || code1 == LE_EXPR)
1841 && (code2 == GT_EXPR || code2 == GE_EXPR))
1842 return boolean_false_node;
1843 else if (cmp >= 0
1844 && (code1 == GT_EXPR || code1 == GE_EXPR)
1845 && (code2 == LT_EXPR || code2 == LE_EXPR))
1846 return boolean_false_node;
1849 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1850 NAME's definition is a truth value. See if there are any simplifications
1851 that can be done against the NAME's definition. */
1852 if (TREE_CODE (op1a) == SSA_NAME
1853 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1854 && (integer_zerop (op1b) || integer_onep (op1b)))
1856 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1857 || (code1 == NE_EXPR && integer_onep (op1b)));
1858 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1859 switch (gimple_code (stmt))
1861 case GIMPLE_ASSIGN:
1862 /* Try to simplify by copy-propagating the definition. */
1863 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1865 case GIMPLE_PHI:
1866 /* If every argument to the PHI produces the same result when
1867 ANDed with the second comparison, we win.
1868 Do not do this unless the type is bool since we need a bool
1869 result here anyway. */
1870 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1872 tree result = NULL_TREE;
1873 unsigned i;
1874 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1876 tree arg = gimple_phi_arg_def (stmt, i);
1878 /* If this PHI has itself as an argument, ignore it.
1879 If all the other args produce the same result,
1880 we're still OK. */
1881 if (arg == gimple_phi_result (stmt))
1882 continue;
1883 else if (TREE_CODE (arg) == INTEGER_CST)
1885 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1887 if (!result)
1888 result = boolean_false_node;
1889 else if (!integer_zerop (result))
1890 return NULL_TREE;
1892 else if (!result)
1893 result = fold_build2 (code2, boolean_type_node,
1894 op2a, op2b);
1895 else if (!same_bool_comparison_p (result,
1896 code2, op2a, op2b))
1897 return NULL_TREE;
1899 else if (TREE_CODE (arg) == SSA_NAME
1900 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1902 tree temp;
1903 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1904 /* In simple cases we can look through PHI nodes,
1905 but we have to be careful with loops.
1906 See PR49073. */
1907 if (! dom_info_available_p (CDI_DOMINATORS)
1908 || gimple_bb (def_stmt) == gimple_bb (stmt)
1909 || dominated_by_p (CDI_DOMINATORS,
1910 gimple_bb (def_stmt),
1911 gimple_bb (stmt)))
1912 return NULL_TREE;
1913 temp = and_var_with_comparison (arg, invert, code2,
1914 op2a, op2b);
1915 if (!temp)
1916 return NULL_TREE;
1917 else if (!result)
1918 result = temp;
1919 else if (!same_bool_result_p (result, temp))
1920 return NULL_TREE;
1922 else
1923 return NULL_TREE;
1925 return result;
1928 default:
1929 break;
1932 return NULL_TREE;
1935 /* Try to simplify the AND of two comparisons, specified by
1936 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1937 If this can be simplified to a single expression (without requiring
1938 introducing more SSA variables to hold intermediate values),
1939 return the resulting tree. Otherwise return NULL_TREE.
1940 If the result expression is non-null, it has boolean type. */
1942 tree
1943 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1944 enum tree_code code2, tree op2a, tree op2b)
1946 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1947 if (t)
1948 return t;
1949 else
1950 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1953 /* Helper function for or_comparisons_1: try to simplify the OR of the
1954 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1955 If INVERT is true, invert the value of VAR before doing the OR.
1956 Return NULL_EXPR if we can't simplify this to a single expression. */
1958 static tree
1959 or_var_with_comparison (tree var, bool invert,
1960 enum tree_code code2, tree op2a, tree op2b)
1962 tree t;
1963 gimple stmt = SSA_NAME_DEF_STMT (var);
1965 /* We can only deal with variables whose definitions are assignments. */
1966 if (!is_gimple_assign (stmt))
1967 return NULL_TREE;
1969 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1970 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1971 Then we only have to consider the simpler non-inverted cases. */
1972 if (invert)
1973 t = and_var_with_comparison_1 (stmt,
1974 invert_tree_comparison (code2, false),
1975 op2a, op2b);
1976 else
1977 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
1978 return canonicalize_bool (t, invert);
1981 /* Try to simplify the OR of the ssa variable defined by the assignment
1982 STMT with the comparison specified by (OP2A CODE2 OP2B).
1983 Return NULL_EXPR if we can't simplify this to a single expression. */
1985 static tree
1986 or_var_with_comparison_1 (gimple stmt,
1987 enum tree_code code2, tree op2a, tree op2b)
1989 tree var = gimple_assign_lhs (stmt);
1990 tree true_test_var = NULL_TREE;
1991 tree false_test_var = NULL_TREE;
1992 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1994 /* Check for identities like (var OR (var != 0)) => true . */
1995 if (TREE_CODE (op2a) == SSA_NAME
1996 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1998 if ((code2 == NE_EXPR && integer_zerop (op2b))
1999 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2001 true_test_var = op2a;
2002 if (var == true_test_var)
2003 return var;
2005 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2006 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2008 false_test_var = op2a;
2009 if (var == false_test_var)
2010 return boolean_true_node;
2014 /* If the definition is a comparison, recurse on it. */
2015 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2017 tree t = or_comparisons_1 (innercode,
2018 gimple_assign_rhs1 (stmt),
2019 gimple_assign_rhs2 (stmt),
2020 code2,
2021 op2a,
2022 op2b);
2023 if (t)
2024 return t;
2027 /* If the definition is an AND or OR expression, we may be able to
2028 simplify by reassociating. */
2029 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2030 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2032 tree inner1 = gimple_assign_rhs1 (stmt);
2033 tree inner2 = gimple_assign_rhs2 (stmt);
2034 gimple s;
2035 tree t;
2036 tree partial = NULL_TREE;
2037 bool is_or = (innercode == BIT_IOR_EXPR);
2039 /* Check for boolean identities that don't require recursive examination
2040 of inner1/inner2:
2041 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2042 inner1 OR (inner1 AND inner2) => inner1
2043 !inner1 OR (inner1 OR inner2) => true
2044 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2046 if (inner1 == true_test_var)
2047 return (is_or ? var : inner1);
2048 else if (inner2 == true_test_var)
2049 return (is_or ? var : inner2);
2050 else if (inner1 == false_test_var)
2051 return (is_or
2052 ? boolean_true_node
2053 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2054 else if (inner2 == false_test_var)
2055 return (is_or
2056 ? boolean_true_node
2057 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2059 /* Next, redistribute/reassociate the OR across the inner tests.
2060 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2061 if (TREE_CODE (inner1) == SSA_NAME
2062 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2063 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2064 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2065 gimple_assign_rhs1 (s),
2066 gimple_assign_rhs2 (s),
2067 code2, op2a, op2b)))
2069 /* Handle the OR case, where we are reassociating:
2070 (inner1 OR inner2) OR (op2a code2 op2b)
2071 => (t OR inner2)
2072 If the partial result t is a constant, we win. Otherwise
2073 continue on to try reassociating with the other inner test. */
2074 if (is_or)
2076 if (integer_onep (t))
2077 return boolean_true_node;
2078 else if (integer_zerop (t))
2079 return inner2;
2082 /* Handle the AND case, where we are redistributing:
2083 (inner1 AND inner2) OR (op2a code2 op2b)
2084 => (t AND (inner2 OR (op2a code op2b))) */
2085 else if (integer_zerop (t))
2086 return boolean_false_node;
2088 /* Save partial result for later. */
2089 partial = t;
2092 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2093 if (TREE_CODE (inner2) == SSA_NAME
2094 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2095 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2096 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2097 gimple_assign_rhs1 (s),
2098 gimple_assign_rhs2 (s),
2099 code2, op2a, op2b)))
2101 /* Handle the OR case, where we are reassociating:
2102 (inner1 OR inner2) OR (op2a code2 op2b)
2103 => (inner1 OR t)
2104 => (t OR partial) */
2105 if (is_or)
2107 if (integer_zerop (t))
2108 return inner1;
2109 else if (integer_onep (t))
2110 return boolean_true_node;
2111 /* If both are the same, we can apply the identity
2112 (x OR x) == x. */
2113 else if (partial && same_bool_result_p (t, partial))
2114 return t;
2117 /* Handle the AND case, where we are redistributing:
2118 (inner1 AND inner2) OR (op2a code2 op2b)
2119 => (t AND (inner1 OR (op2a code2 op2b)))
2120 => (t AND partial) */
2121 else
2123 if (integer_zerop (t))
2124 return boolean_false_node;
2125 else if (partial)
2127 /* We already got a simplification for the other
2128 operand to the redistributed AND expression. The
2129 interesting case is when at least one is true.
2130 Or, if both are the same, we can apply the identity
2131 (x AND x) == x. */
2132 if (integer_onep (partial))
2133 return t;
2134 else if (integer_onep (t))
2135 return partial;
2136 else if (same_bool_result_p (t, partial))
2137 return t;
2142 return NULL_TREE;
2145 /* Try to simplify the OR of two comparisons defined by
2146 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2147 If this can be done without constructing an intermediate value,
2148 return the resulting tree; otherwise NULL_TREE is returned.
2149 This function is deliberately asymmetric as it recurses on SSA_DEFs
2150 in the first comparison but not the second. */
2152 static tree
2153 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2154 enum tree_code code2, tree op2a, tree op2b)
2156 tree truth_type = truth_type_for (TREE_TYPE (op1a));
2158 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2159 if (operand_equal_p (op1a, op2a, 0)
2160 && operand_equal_p (op1b, op2b, 0))
2162 /* Result will be either NULL_TREE, or a combined comparison. */
2163 tree t = combine_comparisons (UNKNOWN_LOCATION,
2164 TRUTH_ORIF_EXPR, code1, code2,
2165 truth_type, op1a, op1b);
2166 if (t)
2167 return t;
2170 /* Likewise the swapped case of the above. */
2171 if (operand_equal_p (op1a, op2b, 0)
2172 && operand_equal_p (op1b, op2a, 0))
2174 /* Result will be either NULL_TREE, or a combined comparison. */
2175 tree t = combine_comparisons (UNKNOWN_LOCATION,
2176 TRUTH_ORIF_EXPR, code1,
2177 swap_tree_comparison (code2),
2178 truth_type, op1a, op1b);
2179 if (t)
2180 return t;
2183 /* If both comparisons are of the same value against constants, we might
2184 be able to merge them. */
2185 if (operand_equal_p (op1a, op2a, 0)
2186 && TREE_CODE (op1b) == INTEGER_CST
2187 && TREE_CODE (op2b) == INTEGER_CST)
2189 int cmp = tree_int_cst_compare (op1b, op2b);
2191 /* If we have (op1a != op1b), we should either be able to
2192 return that or TRUE, depending on whether the constant op1b
2193 also satisfies the other comparison against op2b. */
2194 if (code1 == NE_EXPR)
2196 bool done = true;
2197 bool val;
2198 switch (code2)
2200 case EQ_EXPR: val = (cmp == 0); break;
2201 case NE_EXPR: val = (cmp != 0); break;
2202 case LT_EXPR: val = (cmp < 0); break;
2203 case GT_EXPR: val = (cmp > 0); break;
2204 case LE_EXPR: val = (cmp <= 0); break;
2205 case GE_EXPR: val = (cmp >= 0); break;
2206 default: done = false;
2208 if (done)
2210 if (val)
2211 return boolean_true_node;
2212 else
2213 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2216 /* Likewise if the second comparison is a != comparison. */
2217 else if (code2 == NE_EXPR)
2219 bool done = true;
2220 bool val;
2221 switch (code1)
2223 case EQ_EXPR: val = (cmp == 0); break;
2224 case NE_EXPR: val = (cmp != 0); break;
2225 case LT_EXPR: val = (cmp > 0); break;
2226 case GT_EXPR: val = (cmp < 0); break;
2227 case LE_EXPR: val = (cmp >= 0); break;
2228 case GE_EXPR: val = (cmp <= 0); break;
2229 default: done = false;
2231 if (done)
2233 if (val)
2234 return boolean_true_node;
2235 else
2236 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2240 /* See if an equality test is redundant with the other comparison. */
2241 else if (code1 == EQ_EXPR)
2243 bool val;
2244 switch (code2)
2246 case EQ_EXPR: val = (cmp == 0); break;
2247 case NE_EXPR: val = (cmp != 0); break;
2248 case LT_EXPR: val = (cmp < 0); break;
2249 case GT_EXPR: val = (cmp > 0); break;
2250 case LE_EXPR: val = (cmp <= 0); break;
2251 case GE_EXPR: val = (cmp >= 0); break;
2252 default:
2253 val = false;
2255 if (val)
2256 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2258 else if (code2 == EQ_EXPR)
2260 bool val;
2261 switch (code1)
2263 case EQ_EXPR: val = (cmp == 0); break;
2264 case NE_EXPR: val = (cmp != 0); break;
2265 case LT_EXPR: val = (cmp > 0); break;
2266 case GT_EXPR: val = (cmp < 0); break;
2267 case LE_EXPR: val = (cmp >= 0); break;
2268 case GE_EXPR: val = (cmp <= 0); break;
2269 default:
2270 val = false;
2272 if (val)
2273 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2276 /* Chose the less restrictive of two < or <= comparisons. */
2277 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2278 && (code2 == LT_EXPR || code2 == LE_EXPR))
2280 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2281 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2282 else
2283 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2286 /* Likewise chose the less restrictive of two > or >= comparisons. */
2287 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2288 && (code2 == GT_EXPR || code2 == GE_EXPR))
2290 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2291 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2292 else
2293 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2296 /* Check for singleton ranges. */
2297 else if (cmp == 0
2298 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2299 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2300 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2302 /* Check for less/greater pairs that don't restrict the range at all. */
2303 else if (cmp >= 0
2304 && (code1 == LT_EXPR || code1 == LE_EXPR)
2305 && (code2 == GT_EXPR || code2 == GE_EXPR))
2306 return boolean_true_node;
2307 else if (cmp <= 0
2308 && (code1 == GT_EXPR || code1 == GE_EXPR)
2309 && (code2 == LT_EXPR || code2 == LE_EXPR))
2310 return boolean_true_node;
2313 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2314 NAME's definition is a truth value. See if there are any simplifications
2315 that can be done against the NAME's definition. */
2316 if (TREE_CODE (op1a) == SSA_NAME
2317 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2318 && (integer_zerop (op1b) || integer_onep (op1b)))
2320 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2321 || (code1 == NE_EXPR && integer_onep (op1b)));
2322 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2323 switch (gimple_code (stmt))
2325 case GIMPLE_ASSIGN:
2326 /* Try to simplify by copy-propagating the definition. */
2327 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2329 case GIMPLE_PHI:
2330 /* If every argument to the PHI produces the same result when
2331 ORed with the second comparison, we win.
2332 Do not do this unless the type is bool since we need a bool
2333 result here anyway. */
2334 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2336 tree result = NULL_TREE;
2337 unsigned i;
2338 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2340 tree arg = gimple_phi_arg_def (stmt, i);
2342 /* If this PHI has itself as an argument, ignore it.
2343 If all the other args produce the same result,
2344 we're still OK. */
2345 if (arg == gimple_phi_result (stmt))
2346 continue;
2347 else if (TREE_CODE (arg) == INTEGER_CST)
2349 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2351 if (!result)
2352 result = boolean_true_node;
2353 else if (!integer_onep (result))
2354 return NULL_TREE;
2356 else if (!result)
2357 result = fold_build2 (code2, boolean_type_node,
2358 op2a, op2b);
2359 else if (!same_bool_comparison_p (result,
2360 code2, op2a, op2b))
2361 return NULL_TREE;
2363 else if (TREE_CODE (arg) == SSA_NAME
2364 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2366 tree temp;
2367 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2368 /* In simple cases we can look through PHI nodes,
2369 but we have to be careful with loops.
2370 See PR49073. */
2371 if (! dom_info_available_p (CDI_DOMINATORS)
2372 || gimple_bb (def_stmt) == gimple_bb (stmt)
2373 || dominated_by_p (CDI_DOMINATORS,
2374 gimple_bb (def_stmt),
2375 gimple_bb (stmt)))
2376 return NULL_TREE;
2377 temp = or_var_with_comparison (arg, invert, code2,
2378 op2a, op2b);
2379 if (!temp)
2380 return NULL_TREE;
2381 else if (!result)
2382 result = temp;
2383 else if (!same_bool_result_p (result, temp))
2384 return NULL_TREE;
2386 else
2387 return NULL_TREE;
2389 return result;
2392 default:
2393 break;
2396 return NULL_TREE;
2399 /* Try to simplify the OR of two comparisons, specified by
2400 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2401 If this can be simplified to a single expression (without requiring
2402 introducing more SSA variables to hold intermediate values),
2403 return the resulting tree. Otherwise return NULL_TREE.
2404 If the result expression is non-null, it has boolean type. */
2406 tree
2407 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2408 enum tree_code code2, tree op2a, tree op2b)
2410 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2411 if (t)
2412 return t;
2413 else
2414 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2418 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2420 Either NULL_TREE, a simplified but non-constant or a constant
2421 is returned.
2423 ??? This should go into a gimple-fold-inline.h file to be eventually
2424 privatized with the single valueize function used in the various TUs
2425 to avoid the indirect function call overhead. */
2427 tree
2428 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2430 location_t loc = gimple_location (stmt);
2431 switch (gimple_code (stmt))
2433 case GIMPLE_ASSIGN:
2435 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2437 switch (get_gimple_rhs_class (subcode))
2439 case GIMPLE_SINGLE_RHS:
2441 tree rhs = gimple_assign_rhs1 (stmt);
2442 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2444 if (TREE_CODE (rhs) == SSA_NAME)
2446 /* If the RHS is an SSA_NAME, return its known constant value,
2447 if any. */
2448 return (*valueize) (rhs);
2450 /* Handle propagating invariant addresses into address
2451 operations. */
2452 else if (TREE_CODE (rhs) == ADDR_EXPR
2453 && !is_gimple_min_invariant (rhs))
2455 HOST_WIDE_INT offset = 0;
2456 tree base;
2457 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2458 &offset,
2459 valueize);
2460 if (base
2461 && (CONSTANT_CLASS_P (base)
2462 || decl_address_invariant_p (base)))
2463 return build_invariant_address (TREE_TYPE (rhs),
2464 base, offset);
2466 else if (TREE_CODE (rhs) == CONSTRUCTOR
2467 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2468 && (CONSTRUCTOR_NELTS (rhs)
2469 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2471 unsigned i;
2472 tree val, *vec;
2474 vec = XALLOCAVEC (tree,
2475 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
2476 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2478 val = (*valueize) (val);
2479 if (TREE_CODE (val) == INTEGER_CST
2480 || TREE_CODE (val) == REAL_CST
2481 || TREE_CODE (val) == FIXED_CST)
2482 vec[i] = val;
2483 else
2484 return NULL_TREE;
2487 return build_vector (TREE_TYPE (rhs), vec);
2490 if (kind == tcc_reference)
2492 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2493 || TREE_CODE (rhs) == REALPART_EXPR
2494 || TREE_CODE (rhs) == IMAGPART_EXPR)
2495 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2497 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2498 return fold_unary_loc (EXPR_LOCATION (rhs),
2499 TREE_CODE (rhs),
2500 TREE_TYPE (rhs), val);
2502 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2503 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2505 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2506 return fold_ternary_loc (EXPR_LOCATION (rhs),
2507 TREE_CODE (rhs),
2508 TREE_TYPE (rhs), val,
2509 TREE_OPERAND (rhs, 1),
2510 TREE_OPERAND (rhs, 2));
2512 else if (TREE_CODE (rhs) == MEM_REF
2513 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2515 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2516 if (TREE_CODE (val) == ADDR_EXPR
2517 && is_gimple_min_invariant (val))
2519 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2520 unshare_expr (val),
2521 TREE_OPERAND (rhs, 1));
2522 if (tem)
2523 rhs = tem;
2526 return fold_const_aggregate_ref_1 (rhs, valueize);
2528 else if (kind == tcc_declaration)
2529 return get_symbol_constant_value (rhs);
2530 return rhs;
2533 case GIMPLE_UNARY_RHS:
2535 /* Handle unary operators that can appear in GIMPLE form.
2536 Note that we know the single operand must be a constant,
2537 so this should almost always return a simplified RHS. */
2538 tree lhs = gimple_assign_lhs (stmt);
2539 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2541 /* Conversions are useless for CCP purposes if they are
2542 value-preserving. Thus the restrictions that
2543 useless_type_conversion_p places for restrict qualification
2544 of pointer types should not apply here.
2545 Substitution later will only substitute to allowed places. */
2546 if (CONVERT_EXPR_CODE_P (subcode)
2547 && POINTER_TYPE_P (TREE_TYPE (lhs))
2548 && POINTER_TYPE_P (TREE_TYPE (op0))
2549 && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2550 == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2551 && TYPE_MODE (TREE_TYPE (lhs))
2552 == TYPE_MODE (TREE_TYPE (op0)))
2553 return op0;
2555 return
2556 fold_unary_ignore_overflow_loc (loc, subcode,
2557 gimple_expr_type (stmt), op0);
2560 case GIMPLE_BINARY_RHS:
2562 /* Handle binary operators that can appear in GIMPLE form. */
2563 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2564 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2566 /* Translate &x + CST into an invariant form suitable for
2567 further propagation. */
2568 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2569 && TREE_CODE (op0) == ADDR_EXPR
2570 && TREE_CODE (op1) == INTEGER_CST)
2572 tree off = fold_convert (ptr_type_node, op1);
2573 return build_fold_addr_expr_loc
2574 (loc,
2575 fold_build2 (MEM_REF,
2576 TREE_TYPE (TREE_TYPE (op0)),
2577 unshare_expr (op0), off));
2580 return fold_binary_loc (loc, subcode,
2581 gimple_expr_type (stmt), op0, op1);
2584 case GIMPLE_TERNARY_RHS:
2586 /* Handle ternary operators that can appear in GIMPLE form. */
2587 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2588 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2589 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2591 /* Fold embedded expressions in ternary codes. */
2592 if ((subcode == COND_EXPR
2593 || subcode == VEC_COND_EXPR)
2594 && COMPARISON_CLASS_P (op0))
2596 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2597 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2598 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2599 TREE_TYPE (op0), op00, op01);
2600 if (tem)
2601 op0 = tem;
2604 return fold_ternary_loc (loc, subcode,
2605 gimple_expr_type (stmt), op0, op1, op2);
2608 default:
2609 gcc_unreachable ();
2613 case GIMPLE_CALL:
2615 tree fn;
2617 if (gimple_call_internal_p (stmt))
2618 /* No folding yet for these functions. */
2619 return NULL_TREE;
2621 fn = (*valueize) (gimple_call_fn (stmt));
2622 if (TREE_CODE (fn) == ADDR_EXPR
2623 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2624 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2626 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2627 tree call, retval;
2628 unsigned i;
2629 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2630 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2631 call = build_call_array_loc (loc,
2632 gimple_call_return_type (stmt),
2633 fn, gimple_call_num_args (stmt), args);
2634 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2635 if (retval)
2636 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2637 STRIP_NOPS (retval);
2638 return retval;
2640 return NULL_TREE;
2643 default:
2644 return NULL_TREE;
2648 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2649 Returns NULL_TREE if folding to a constant is not possible, otherwise
2650 returns a constant according to is_gimple_min_invariant. */
2652 tree
2653 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2655 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2656 if (res && is_gimple_min_invariant (res))
2657 return res;
2658 return NULL_TREE;
2662 /* The following set of functions are supposed to fold references using
2663 their constant initializers. */
2665 static tree fold_ctor_reference (tree type, tree ctor,
2666 unsigned HOST_WIDE_INT offset,
2667 unsigned HOST_WIDE_INT size, tree);
2669 /* See if we can find constructor defining value of BASE.
2670 When we know the consructor with constant offset (such as
2671 base is array[40] and we do know constructor of array), then
2672 BIT_OFFSET is adjusted accordingly.
2674 As a special case, return error_mark_node when constructor
2675 is not explicitly available, but it is known to be zero
2676 such as 'static const int a;'. */
2677 static tree
2678 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2679 tree (*valueize)(tree))
2681 HOST_WIDE_INT bit_offset2, size, max_size;
2682 if (TREE_CODE (base) == MEM_REF)
2684 if (!integer_zerop (TREE_OPERAND (base, 1)))
2686 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2687 return NULL_TREE;
2688 *bit_offset += (mem_ref_offset (base).low
2689 * BITS_PER_UNIT);
2692 if (valueize
2693 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2694 base = valueize (TREE_OPERAND (base, 0));
2695 if (!base || TREE_CODE (base) != ADDR_EXPR)
2696 return NULL_TREE;
2697 base = TREE_OPERAND (base, 0);
2700 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2701 DECL_INITIAL. If BASE is a nested reference into another
2702 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2703 the inner reference. */
2704 switch (TREE_CODE (base))
2706 case VAR_DECL:
2707 if (!const_value_known_p (base))
2708 return NULL_TREE;
2710 /* Fallthru. */
2711 case CONST_DECL:
2712 if (!DECL_INITIAL (base)
2713 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2714 return error_mark_node;
2715 /* Do not return an error_mark_node DECL_INITIAL. LTO uses this
2716 as special marker (_not_ zero ...) for its own purposes. */
2717 if (DECL_INITIAL (base) == error_mark_node)
2718 return NULL_TREE;
2719 return DECL_INITIAL (base);
2721 case ARRAY_REF:
2722 case COMPONENT_REF:
2723 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2724 if (max_size == -1 || size != max_size)
2725 return NULL_TREE;
2726 *bit_offset += bit_offset2;
2727 return get_base_constructor (base, bit_offset, valueize);
2729 case STRING_CST:
2730 case CONSTRUCTOR:
2731 return base;
2733 default:
2734 return NULL_TREE;
2738 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2739 to the memory at bit OFFSET.
2741 We do only simple job of folding byte accesses. */
2743 static tree
2744 fold_string_cst_ctor_reference (tree type, tree ctor,
2745 unsigned HOST_WIDE_INT offset,
2746 unsigned HOST_WIDE_INT size)
2748 if (INTEGRAL_TYPE_P (type)
2749 && (TYPE_MODE (type)
2750 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2751 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2752 == MODE_INT)
2753 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2754 && size == BITS_PER_UNIT
2755 && !(offset % BITS_PER_UNIT))
2757 offset /= BITS_PER_UNIT;
2758 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2759 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2760 [offset]));
2761 /* Folding
2762 const char a[20]="hello";
2763 return a[10];
2765 might lead to offset greater than string length. In this case we
2766 know value is either initialized to 0 or out of bounds. Return 0
2767 in both cases. */
2768 return build_zero_cst (type);
2770 return NULL_TREE;
2773 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2774 SIZE to the memory at bit OFFSET. */
2776 static tree
2777 fold_array_ctor_reference (tree type, tree ctor,
2778 unsigned HOST_WIDE_INT offset,
2779 unsigned HOST_WIDE_INT size,
2780 tree from_decl)
2782 unsigned HOST_WIDE_INT cnt;
2783 tree cfield, cval;
2784 double_int low_bound, elt_size;
2785 double_int index, max_index;
2786 double_int access_index;
2787 tree domain_type = NULL_TREE, index_type = NULL_TREE;
2788 HOST_WIDE_INT inner_offset;
2790 /* Compute low bound and elt size. */
2791 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2792 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2793 if (domain_type && TYPE_MIN_VALUE (domain_type))
2795 /* Static constructors for variably sized objects makes no sense. */
2796 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2797 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
2798 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2800 else
2801 low_bound = double_int_zero;
2802 /* Static constructors for variably sized objects makes no sense. */
2803 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2804 == INTEGER_CST);
2805 elt_size =
2806 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2809 /* We can handle only constantly sized accesses that are known to not
2810 be larger than size of array element. */
2811 if (!TYPE_SIZE_UNIT (type)
2812 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2813 || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type))))
2814 return NULL_TREE;
2816 /* Compute the array index we look for. */
2817 access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
2818 .udiv (elt_size, TRUNC_DIV_EXPR);
2819 access_index += low_bound;
2820 if (index_type)
2821 access_index = access_index.ext (TYPE_PRECISION (index_type),
2822 TYPE_UNSIGNED (index_type));
2824 /* And offset within the access. */
2825 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
2827 /* See if the array field is large enough to span whole access. We do not
2828 care to fold accesses spanning multiple array indexes. */
2829 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
2830 return NULL_TREE;
2832 index = low_bound - double_int_one;
2833 if (index_type)
2834 index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));
2836 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2838 /* Array constructor might explicitely set index, or specify range
2839 or leave index NULL meaning that it is next index after previous
2840 one. */
2841 if (cfield)
2843 if (TREE_CODE (cfield) == INTEGER_CST)
2844 max_index = index = tree_to_double_int (cfield);
2845 else
2847 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2848 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2849 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2852 else
2854 index += double_int_one;
2855 if (index_type)
2856 index = index.ext (TYPE_PRECISION (index_type),
2857 TYPE_UNSIGNED (index_type));
2858 max_index = index;
2861 /* Do we have match? */
2862 if (access_index.cmp (index, 1) >= 0
2863 && access_index.cmp (max_index, 1) <= 0)
2864 return fold_ctor_reference (type, cval, inner_offset, size,
2865 from_decl);
2867 /* When memory is not explicitely mentioned in constructor,
2868 it is 0 (or out of range). */
2869 return build_zero_cst (type);
2872 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2873 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2875 static tree
2876 fold_nonarray_ctor_reference (tree type, tree ctor,
2877 unsigned HOST_WIDE_INT offset,
2878 unsigned HOST_WIDE_INT size,
2879 tree from_decl)
2881 unsigned HOST_WIDE_INT cnt;
2882 tree cfield, cval;
2884 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2885 cval)
2887 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2888 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2889 tree field_size = DECL_SIZE (cfield);
2890 double_int bitoffset;
2891 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2892 double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
2893 double_int bitoffset_end, access_end;
2895 /* Variable sized objects in static constructors makes no sense,
2896 but field_size can be NULL for flexible array members. */
2897 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2898 && TREE_CODE (byte_offset) == INTEGER_CST
2899 && (field_size != NULL_TREE
2900 ? TREE_CODE (field_size) == INTEGER_CST
2901 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2903 /* Compute bit offset of the field. */
2904 bitoffset = tree_to_double_int (field_offset)
2905 + byte_offset_cst * bits_per_unit_cst;
2906 /* Compute bit offset where the field ends. */
2907 if (field_size != NULL_TREE)
2908 bitoffset_end = bitoffset + tree_to_double_int (field_size);
2909 else
2910 bitoffset_end = double_int_zero;
2912 access_end = double_int::from_uhwi (offset)
2913 + double_int::from_uhwi (size);
2915 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2916 [BITOFFSET, BITOFFSET_END)? */
2917 if (access_end.cmp (bitoffset, 0) > 0
2918 && (field_size == NULL_TREE
2919 || double_int::from_uhwi (offset).slt (bitoffset_end)))
2921 double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
2922 /* We do have overlap. Now see if field is large enough to
2923 cover the access. Give up for accesses spanning multiple
2924 fields. */
2925 if (access_end.cmp (bitoffset_end, 0) > 0)
2926 return NULL_TREE;
2927 if (double_int::from_uhwi (offset).slt (bitoffset))
2928 return NULL_TREE;
2929 return fold_ctor_reference (type, cval,
2930 inner_offset.to_uhwi (), size,
2931 from_decl);
2934 /* When memory is not explicitely mentioned in constructor, it is 0. */
2935 return build_zero_cst (type);
2938 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2939 to the memory at bit OFFSET. */
2941 static tree
2942 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2943 unsigned HOST_WIDE_INT size, tree from_decl)
2945 tree ret;
2947 /* We found the field with exact match. */
2948 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2949 && !offset)
2950 return canonicalize_constructor_val (ctor, from_decl);
2952 /* We are at the end of walk, see if we can view convert the
2953 result. */
2954 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2955 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2956 && operand_equal_p (TYPE_SIZE (type),
2957 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2959 ret = canonicalize_constructor_val (ctor, from_decl);
2960 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2961 if (ret)
2962 STRIP_NOPS (ret);
2963 return ret;
2965 if (TREE_CODE (ctor) == STRING_CST)
2966 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2967 if (TREE_CODE (ctor) == CONSTRUCTOR)
2970 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2971 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
2972 return fold_array_ctor_reference (type, ctor, offset, size,
2973 from_decl);
2974 else
2975 return fold_nonarray_ctor_reference (type, ctor, offset, size,
2976 from_decl);
2979 return NULL_TREE;
2982 /* Return the tree representing the element referenced by T if T is an
2983 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
2984 names using VALUEIZE. Return NULL_TREE otherwise. */
2986 tree
2987 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
2989 tree ctor, idx, base;
2990 HOST_WIDE_INT offset, size, max_size;
2991 tree tem;
2993 if (TREE_THIS_VOLATILE (t))
2994 return NULL_TREE;
2996 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
2997 return get_symbol_constant_value (t);
2999 tem = fold_read_from_constant_string (t);
3000 if (tem)
3001 return tem;
3003 switch (TREE_CODE (t))
3005 case ARRAY_REF:
3006 case ARRAY_RANGE_REF:
3007 /* Constant indexes are handled well by get_base_constructor.
3008 Only special case variable offsets.
3009 FIXME: This code can't handle nested references with variable indexes
3010 (they will be handled only by iteration of ccp). Perhaps we can bring
3011 get_ref_base_and_extent here and make it use a valueize callback. */
3012 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3013 && valueize
3014 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3015 && TREE_CODE (idx) == INTEGER_CST)
3017 tree low_bound, unit_size;
3018 double_int doffset;
3020 /* If the resulting bit-offset is constant, track it. */
3021 if ((low_bound = array_ref_low_bound (t),
3022 TREE_CODE (low_bound) == INTEGER_CST)
3023 && (unit_size = array_ref_element_size (t),
3024 host_integerp (unit_size, 1))
3025 && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
3026 .sext (TYPE_PRECISION (TREE_TYPE (idx))),
3027 doffset.fits_shwi ()))
3029 offset = doffset.to_shwi ();
3030 offset *= TREE_INT_CST_LOW (unit_size);
3031 offset *= BITS_PER_UNIT;
3033 base = TREE_OPERAND (t, 0);
3034 ctor = get_base_constructor (base, &offset, valueize);
3035 /* Empty constructor. Always fold to 0. */
3036 if (ctor == error_mark_node)
3037 return build_zero_cst (TREE_TYPE (t));
3038 /* Out of bound array access. Value is undefined,
3039 but don't fold. */
3040 if (offset < 0)
3041 return NULL_TREE;
3042 /* We can not determine ctor. */
3043 if (!ctor)
3044 return NULL_TREE;
3045 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3046 TREE_INT_CST_LOW (unit_size)
3047 * BITS_PER_UNIT,
3048 base);
3051 /* Fallthru. */
3053 case COMPONENT_REF:
3054 case BIT_FIELD_REF:
3055 case TARGET_MEM_REF:
3056 case MEM_REF:
3057 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3058 ctor = get_base_constructor (base, &offset, valueize);
3060 /* Empty constructor. Always fold to 0. */
3061 if (ctor == error_mark_node)
3062 return build_zero_cst (TREE_TYPE (t));
3063 /* We do not know precise address. */
3064 if (max_size == -1 || max_size != size)
3065 return NULL_TREE;
3066 /* We can not determine ctor. */
3067 if (!ctor)
3068 return NULL_TREE;
3070 /* Out of bound array access. Value is undefined, but don't fold. */
3071 if (offset < 0)
3072 return NULL_TREE;
3074 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
3075 base);
3077 case REALPART_EXPR:
3078 case IMAGPART_EXPR:
3080 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3081 if (c && TREE_CODE (c) == COMPLEX_CST)
3082 return fold_build1_loc (EXPR_LOCATION (t),
3083 TREE_CODE (t), TREE_TYPE (t), c);
3084 break;
3087 default:
3088 break;
3091 return NULL_TREE;
3094 tree
3095 fold_const_aggregate_ref (tree t)
3097 return fold_const_aggregate_ref_1 (t, NULL);
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  Returns NULL_TREE when the call cannot be
   devirtualized.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
{
  unsigned HOST_WIDE_INT offset, size;
  tree v, fn, vtable;

  vtable = v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  /* The vtable reference may be &table or &table p+ CST; strip the
     constant byte offset, converting it to bits.  */
  if (TREE_CODE (v) == POINTER_PLUS_EXPR)
    {
      /* NOTE(review): assumes the POINTER_PLUS offset fits in an unsigned
	 host integer -- tree_low_cst asserts otherwise; confirm callers
	 only build such offsets.  */
      offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
      v = TREE_OPERAND (v, 0);
    }
  else
    offset = 0;

  if (TREE_CODE (v) != ADDR_EXPR)
    return NULL_TREE;
  v = TREE_OPERAND (v, 0);

  /* Only a defined, non-erroneous virtual table VAR_DECL can be looked
     into.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v)
      || !DECL_INITIAL (v)
      || DECL_INITIAL (v) == error_mark_node)
    return NULL_TREE;
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  /* Each slot is one array element; TOKEN indexes into the table,
     all in bit units.  */
  size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
  offset += token * size;
  /* Extract the constant stored at OFFSET from the vtable initializer.  */
  fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
			    offset, size, vtable);
  if (!fn || integer_zerop (fn))
    return NULL_TREE;
  gcc_assert (TREE_CODE (fn) == ADDR_EXPR
	      || TREE_CODE (fn) == FDESC_EXPR);
  fn = TREE_OPERAND (fn, 0);
  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);

  /* When cgraph node is missing and function is not public, we cannot
     devirtualize.  This can happen in WHOPR when the actual method
     ends up in other partition, because we found devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (fn, vtable))
    return NULL_TREE;

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_get_create_node (fn);

  return fn;
}
3160 /* Return true iff VAL is a gimple expression that is known to be
3161 non-negative. Restricted to floating-point inputs. */
3163 bool
3164 gimple_val_nonnegative_real_p (tree val)
3166 gimple def_stmt;
3168 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3170 /* Use existing logic for non-gimple trees. */
3171 if (tree_expr_nonnegative_p (val))
3172 return true;
3174 if (TREE_CODE (val) != SSA_NAME)
3175 return false;
3177 /* Currently we look only at the immediately defining statement
3178 to make this determination, since recursion on defining
3179 statements of operands can lead to quadratic behavior in the
3180 worst case. This is expected to catch almost all occurrences
3181 in practice. It would be possible to implement limited-depth
3182 recursion if important cases are lost. Alternatively, passes
3183 that need this information (such as the pow/powi lowering code
3184 in the cse_sincos pass) could be revised to provide it through
3185 dataflow propagation. */
3187 def_stmt = SSA_NAME_DEF_STMT (val);
3189 if (is_gimple_assign (def_stmt))
3191 tree op0, op1;
3193 /* See fold-const.c:tree_expr_nonnegative_p for additional
3194 cases that could be handled with recursion. */
3196 switch (gimple_assign_rhs_code (def_stmt))
3198 case ABS_EXPR:
3199 /* Always true for floating-point operands. */
3200 return true;
3202 case MULT_EXPR:
3203 /* True if the two operands are identical (since we are
3204 restricted to floating-point inputs). */
3205 op0 = gimple_assign_rhs1 (def_stmt);
3206 op1 = gimple_assign_rhs2 (def_stmt);
3208 if (op0 == op1
3209 || operand_equal_p (op0, op1, 0))
3210 return true;
3212 default:
3213 return false;
3216 else if (is_gimple_call (def_stmt))
3218 tree fndecl = gimple_call_fndecl (def_stmt);
3219 if (fndecl
3220 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3222 tree arg1;
3224 switch (DECL_FUNCTION_CODE (fndecl))
3226 CASE_FLT_FN (BUILT_IN_ACOS):
3227 CASE_FLT_FN (BUILT_IN_ACOSH):
3228 CASE_FLT_FN (BUILT_IN_CABS):
3229 CASE_FLT_FN (BUILT_IN_COSH):
3230 CASE_FLT_FN (BUILT_IN_ERFC):
3231 CASE_FLT_FN (BUILT_IN_EXP):
3232 CASE_FLT_FN (BUILT_IN_EXP10):
3233 CASE_FLT_FN (BUILT_IN_EXP2):
3234 CASE_FLT_FN (BUILT_IN_FABS):
3235 CASE_FLT_FN (BUILT_IN_FDIM):
3236 CASE_FLT_FN (BUILT_IN_HYPOT):
3237 CASE_FLT_FN (BUILT_IN_POW10):
3238 return true;
3240 CASE_FLT_FN (BUILT_IN_SQRT):
3241 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3242 nonnegative inputs. */
3243 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3244 return true;
3246 break;
3248 CASE_FLT_FN (BUILT_IN_POWI):
3249 /* True if the second argument is an even integer. */
3250 arg1 = gimple_call_arg (def_stmt, 1);
3252 if (TREE_CODE (arg1) == INTEGER_CST
3253 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3254 return true;
3256 break;
3258 CASE_FLT_FN (BUILT_IN_POW):
3259 /* True if the second argument is an even integer-valued
3260 real. */
3261 arg1 = gimple_call_arg (def_stmt, 1);
3263 if (TREE_CODE (arg1) == REAL_CST)
3265 REAL_VALUE_TYPE c;
3266 HOST_WIDE_INT n;
3268 c = TREE_REAL_CST (arg1);
3269 n = real_to_integer (&c);
3271 if ((n & 1) == 0)
3273 REAL_VALUE_TYPE cint;
3274 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3275 if (real_identical (&c, &cint))
3276 return true;
3280 break;
3282 default:
3283 return false;
3288 return false;