* sv.po: Update.
[official-gcc.git] / gcc / gimple-fold.c
blobe561a63fdb8fea63d325c30cebb12944e485e181
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
32 #include "target.h"
34 /* Return true when DECL can be referenced from current unit.
35 We can get declarations that are not possible to reference for
36 various reasons:
38 1) When analyzing C++ virtual tables.
39 C++ virtual tables do have known constructors even
40 when they are keyed to other compilation unit.
41 Those tables can contain pointers to methods and vars
42 in other units. Those methods have both STATIC and EXTERNAL
43 set.
44 2) In WHOPR mode devirtualization might lead to reference
45 to method that was partitioned elsehwere.
46 In this case we have static VAR_DECL or FUNCTION_DECL
47 that has no corresponding callgraph/varpool node
48 declaring the body.
49 3) COMDAT functions referred by external vtables that
50 we devirtualize only during final copmilation stage.
51 At this time we already decided that we will not output
52 the function body and thus we can't reference the symbol
53 directly. */
55 static bool
56 can_refer_decl_in_current_unit_p (tree decl)
58 struct varpool_node *vnode;
59 struct cgraph_node *node;
61 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
62 return true;
63 /* External flag is set, so we deal with C++ reference
64 to static object from other file. */
65 if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
66 && TREE_CODE (decl) == VAR_DECL)
68 /* Just be sure it is not big in frontend setting
69 flags incorrectly. Those variables should never
70 be finalized. */
71 gcc_checking_assert (!(vnode = varpool_get_node (decl))
72 || !vnode->finalized);
73 return false;
75 /* When function is public, we always can introduce new reference.
76 Exception are the COMDAT functions where introducing a direct
77 reference imply need to include function body in the curren tunit. */
78 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
79 return true;
80 /* We are not at ltrans stage; so don't worry about WHOPR.
81 Also when still gimplifying all referred comdat functions will be
82 produced. */
83 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
84 return true;
85 /* If we already output the function body, we are safe. */
86 if (TREE_ASM_WRITTEN (decl))
87 return true;
88 if (TREE_CODE (decl) == FUNCTION_DECL)
90 node = cgraph_get_node (decl);
91 /* Check that we still have function body and that we didn't took
92 the decision to eliminate offline copy of the function yet.
93 The second is important when devirtualization happens during final
94 compilation stage when making a new reference no longer makes callee
95 to be compiled. */
96 if (!node || !node->analyzed || node->global.inlined_to)
97 return false;
99 else if (TREE_CODE (decl) == VAR_DECL)
101 vnode = varpool_get_node (decl);
102 if (!vnode || !vnode->finalized)
103 return false;
105 return true;
108 /* CVAL is value taken from DECL_INITIAL of variable. Try to transorm it into
109 acceptable form for is_gimple_min_invariant. */
111 tree
112 canonicalize_constructor_val (tree cval)
114 STRIP_NOPS (cval);
115 if (TREE_CODE (cval) == POINTER_PLUS_EXPR)
117 tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval),
118 TREE_OPERAND (cval, 0),
119 TREE_OPERAND (cval, 1),
120 TREE_TYPE (cval));
121 if (t)
122 cval = t;
124 if (TREE_CODE (cval) == ADDR_EXPR)
126 tree base = get_base_address (TREE_OPERAND (cval, 0));
128 if (base
129 && (TREE_CODE (base) == VAR_DECL
130 || TREE_CODE (base) == FUNCTION_DECL)
131 && !can_refer_decl_in_current_unit_p (base))
132 return NULL_TREE;
133 if (base && TREE_CODE (base) == VAR_DECL)
134 add_referenced_var (base);
136 return cval;
139 /* If SYM is a constant variable with known value, return the value.
140 NULL_TREE is returned otherwise. */
142 tree
143 get_symbol_constant_value (tree sym)
145 if (const_value_known_p (sym))
147 tree val = DECL_INITIAL (sym);
148 if (val)
150 val = canonicalize_constructor_val (val);
151 if (val && is_gimple_min_invariant (val))
152 return val;
153 else
154 return NULL_TREE;
156 /* Variables declared 'const' without an initializer
157 have zero as the initializer if they may not be
158 overridden at link or run time. */
159 if (!val
160 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
161 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
162 return build_zero_cst (TREE_TYPE (sym));
165 return NULL_TREE;
169 /* Return true if we may propagate the address expression ADDR into the
170 dereference DEREF and cancel them. */
172 bool
173 may_propagate_address_into_dereference (tree addr, tree deref)
175 gcc_assert (TREE_CODE (deref) == MEM_REF
176 && TREE_CODE (addr) == ADDR_EXPR);
178 /* Don't propagate if ADDR's operand has incomplete type. */
179 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
180 return false;
182 /* If the address is invariant then we do not need to preserve restrict
183 qualifications. But we do need to preserve volatile qualifiers until
184 we can annotate the folded dereference itself properly. */
185 if (is_gimple_min_invariant (addr)
186 && (!TREE_THIS_VOLATILE (deref)
187 || TYPE_VOLATILE (TREE_TYPE (addr))))
188 return useless_type_conversion_p (TREE_TYPE (deref),
189 TREE_TYPE (TREE_OPERAND (addr, 0)));
191 /* Else both the address substitution and the folding must result in
192 a valid useless type conversion sequence. */
193 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
194 TREE_TYPE (addr))
195 && useless_type_conversion_p (TREE_TYPE (deref),
196 TREE_TYPE (TREE_OPERAND (addr, 0))));
200 /* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
201 BASE is an array type. OFFSET is a byte displacement.
203 LOC is the location of the original expression. */
205 static tree
206 maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
208 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
209 tree array_type, elt_type, elt_size;
210 tree domain_type;
212 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
213 measured in units of the size of elements type) from that ARRAY_REF).
214 We can't do anything if either is variable.
216 The case we handle here is *(&A[N]+O). */
217 if (TREE_CODE (base) == ARRAY_REF)
219 tree low_bound = array_ref_low_bound (base);
221 elt_offset = TREE_OPERAND (base, 1);
222 if (TREE_CODE (low_bound) != INTEGER_CST
223 || TREE_CODE (elt_offset) != INTEGER_CST)
224 return NULL_TREE;
226 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
227 base = TREE_OPERAND (base, 0);
230 /* Ignore stupid user tricks of indexing non-array variables. */
231 array_type = TREE_TYPE (base);
232 if (TREE_CODE (array_type) != ARRAY_TYPE)
233 return NULL_TREE;
234 elt_type = TREE_TYPE (array_type);
236 /* Use signed size type for intermediate computation on the index. */
237 idx_type = ssizetype;
239 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
240 element type (so we can use the alignment if it's not constant).
241 Otherwise, compute the offset as an index by using a division. If the
242 division isn't exact, then don't do anything. */
243 elt_size = TYPE_SIZE_UNIT (elt_type);
244 if (!elt_size)
245 return NULL;
246 if (integer_zerop (offset))
248 if (TREE_CODE (elt_size) != INTEGER_CST)
249 elt_size = size_int (TYPE_ALIGN (elt_type));
251 idx = build_int_cst (idx_type, 0);
253 else
255 unsigned HOST_WIDE_INT lquo, lrem;
256 HOST_WIDE_INT hquo, hrem;
257 double_int soffset;
259 /* The final array offset should be signed, so we need
260 to sign-extend the (possibly pointer) offset here
261 and use signed division. */
262 soffset = double_int_sext (tree_to_double_int (offset),
263 TYPE_PRECISION (TREE_TYPE (offset)));
264 if (TREE_CODE (elt_size) != INTEGER_CST
265 || div_and_round_double (TRUNC_DIV_EXPR, 0,
266 soffset.low, soffset.high,
267 TREE_INT_CST_LOW (elt_size),
268 TREE_INT_CST_HIGH (elt_size),
269 &lquo, &hquo, &lrem, &hrem)
270 || lrem || hrem)
271 return NULL_TREE;
273 idx = build_int_cst_wide (idx_type, lquo, hquo);
276 /* Assume the low bound is zero. If there is a domain type, get the
277 low bound, if any, convert the index into that type, and add the
278 low bound. */
279 min_idx = build_int_cst (idx_type, 0);
280 domain_type = TYPE_DOMAIN (array_type);
281 if (domain_type)
283 idx_type = domain_type;
284 if (TYPE_MIN_VALUE (idx_type))
285 min_idx = TYPE_MIN_VALUE (idx_type);
286 else
287 min_idx = fold_convert (idx_type, min_idx);
289 if (TREE_CODE (min_idx) != INTEGER_CST)
290 return NULL_TREE;
292 elt_offset = fold_convert (idx_type, elt_offset);
295 if (!integer_zerop (min_idx))
296 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
297 if (!integer_zerop (elt_offset))
298 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
300 /* Make sure to possibly truncate late after offsetting. */
301 idx = fold_convert (idx_type, idx);
303 /* We don't want to construct access past array bounds. For example
304 char *(c[4]);
305 c[3][2];
306 should not be simplified into (*c)[14] or tree-vrp will
307 give false warnings.
308 This is only an issue for multi-dimensional arrays. */
309 if (TREE_CODE (elt_type) == ARRAY_TYPE
310 && domain_type)
312 if (TYPE_MAX_VALUE (domain_type)
313 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
314 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
315 return NULL_TREE;
316 else if (TYPE_MIN_VALUE (domain_type)
317 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
318 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
319 return NULL_TREE;
320 else if (compare_tree_int (idx, 0) < 0)
321 return NULL_TREE;
325 tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
326 SET_EXPR_LOCATION (t, loc);
327 return t;
332 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
333 LOC is the location of original expression.
335 Before attempting the conversion strip off existing ADDR_EXPRs. */
337 tree
338 maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
339 tree orig_type)
341 tree ret;
343 STRIP_NOPS (base);
344 if (TREE_CODE (base) != ADDR_EXPR)
345 return NULL_TREE;
347 base = TREE_OPERAND (base, 0);
348 if (types_compatible_p (orig_type, TREE_TYPE (base))
349 && integer_zerop (offset))
350 return base;
352 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
353 if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
354 return ret;
355 return NULL_TREE;
358 /* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
359 LOC is the location of the original expression. */
361 tree
362 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
363 tree orig_type)
365 tree base, ret;
367 STRIP_NOPS (addr);
368 if (TREE_CODE (addr) != ADDR_EXPR)
369 return NULL_TREE;
370 base = TREE_OPERAND (addr, 0);
371 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
372 if (ret)
374 ret = build_fold_addr_expr (ret);
375 if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
376 return NULL_TREE;
377 SET_EXPR_LOCATION (ret, loc);
380 return ret;
384 /* A quaint feature extant in our address arithmetic is that there
385 can be hidden type changes here. The type of the result need
386 not be the same as the type of the input pointer.
388 What we're after here is an expression of the form
389 (T *)(&array + const)
390 where array is OP0, const is OP1, RES_TYPE is T and
391 the cast doesn't actually exist, but is implicit in the
392 type of the POINTER_PLUS_EXPR. We'd like to turn this into
393 &array[x]
394 which may be able to propagate further. */
396 tree
397 maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
399 tree ptd_type;
400 tree t;
402 /* The first operand should be an ADDR_EXPR. */
403 if (TREE_CODE (op0) != ADDR_EXPR)
404 return NULL_TREE;
405 op0 = TREE_OPERAND (op0, 0);
407 /* It had better be a constant. */
408 if (TREE_CODE (op1) != INTEGER_CST)
410 /* Or op0 should now be A[0] and the non-constant offset defined
411 via a multiplication by the array element size. */
412 if (TREE_CODE (op0) == ARRAY_REF
413 /* As we will end up creating a variable index array access
414 in the outermost array dimension make sure there isn't
415 a more inner array that the index could overflow to. */
416 && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
417 && integer_zerop (TREE_OPERAND (op0, 1))
418 && TREE_CODE (op1) == SSA_NAME)
420 gimple offset_def = SSA_NAME_DEF_STMT (op1);
421 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
422 if (!host_integerp (elsz, 1)
423 || !is_gimple_assign (offset_def))
424 return NULL_TREE;
426 /* Do not build array references of something that we can't
427 see the true number of array dimensions for. */
428 if (!DECL_P (TREE_OPERAND (op0, 0))
429 && !handled_component_p (TREE_OPERAND (op0, 0)))
430 return NULL_TREE;
432 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
433 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
434 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
435 return build_fold_addr_expr
436 (build4 (ARRAY_REF, TREE_TYPE (op0),
437 TREE_OPERAND (op0, 0),
438 gimple_assign_rhs1 (offset_def),
439 TREE_OPERAND (op0, 2),
440 TREE_OPERAND (op0, 3)));
441 else if (integer_onep (elsz)
442 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
443 return build_fold_addr_expr
444 (build4 (ARRAY_REF, TREE_TYPE (op0),
445 TREE_OPERAND (op0, 0),
446 op1,
447 TREE_OPERAND (op0, 2),
448 TREE_OPERAND (op0, 3)));
450 else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
451 /* Dto. */
452 && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
453 && TREE_CODE (op1) == SSA_NAME)
455 gimple offset_def = SSA_NAME_DEF_STMT (op1);
456 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
457 if (!host_integerp (elsz, 1)
458 || !is_gimple_assign (offset_def))
459 return NULL_TREE;
461 /* Do not build array references of something that we can't
462 see the true number of array dimensions for. */
463 if (!DECL_P (op0)
464 && !handled_component_p (op0))
465 return NULL_TREE;
467 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
468 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
469 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
470 return build_fold_addr_expr
471 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
472 op0, gimple_assign_rhs1 (offset_def),
473 integer_zero_node, NULL_TREE));
474 else if (integer_onep (elsz)
475 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
476 return build_fold_addr_expr
477 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
478 op0, op1,
479 integer_zero_node, NULL_TREE));
482 return NULL_TREE;
485 /* If the first operand is an ARRAY_REF, expand it so that we can fold
486 the offset into it. */
487 while (TREE_CODE (op0) == ARRAY_REF)
489 tree array_obj = TREE_OPERAND (op0, 0);
490 tree array_idx = TREE_OPERAND (op0, 1);
491 tree elt_type = TREE_TYPE (op0);
492 tree elt_size = TYPE_SIZE_UNIT (elt_type);
493 tree min_idx;
495 if (TREE_CODE (array_idx) != INTEGER_CST)
496 break;
497 if (TREE_CODE (elt_size) != INTEGER_CST)
498 break;
500 /* Un-bias the index by the min index of the array type. */
501 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
502 if (min_idx)
504 min_idx = TYPE_MIN_VALUE (min_idx);
505 if (min_idx)
507 if (TREE_CODE (min_idx) != INTEGER_CST)
508 break;
510 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
511 if (!integer_zerop (min_idx))
512 array_idx = int_const_binop (MINUS_EXPR, array_idx,
513 min_idx, 0);
517 /* Convert the index to a byte offset. */
518 array_idx = fold_convert (sizetype, array_idx);
519 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
521 /* Update the operands for the next round, or for folding. */
522 op1 = int_const_binop (PLUS_EXPR,
523 array_idx, op1, 0);
524 op0 = array_obj;
527 ptd_type = TREE_TYPE (res_type);
528 /* If we want a pointer to void, reconstruct the reference from the
529 array element type. A pointer to that can be trivially converted
530 to void *. This happens as we fold (void *)(ptr p+ off). */
531 if (VOID_TYPE_P (ptd_type)
532 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
533 ptd_type = TREE_TYPE (TREE_TYPE (op0));
535 /* At which point we can try some of the same things as for indirects. */
536 t = maybe_fold_offset_to_array_ref (loc, op0, op1);
537 if (t)
539 t = build_fold_addr_expr (t);
540 if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
541 return NULL_TREE;
542 SET_EXPR_LOCATION (t, loc);
545 return t;
548 /* Subroutine of fold_stmt. We perform several simplifications of the
549 memory reference tree EXPR and make sure to re-gimplify them properly
550 after propagation of constant addresses. IS_LHS is true if the
551 reference is supposed to be an lvalue. */
553 static tree
554 maybe_fold_reference (tree expr, bool is_lhs)
556 tree *t = &expr;
557 tree result;
559 if (!is_lhs
560 && (result = fold_const_aggregate_ref (expr))
561 && is_gimple_min_invariant (result))
562 return result;
564 /* ??? We might want to open-code the relevant remaining cases
565 to avoid using the generic fold. */
566 if (handled_component_p (*t)
567 && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
569 tree tem = fold (*t);
570 if (tem != *t)
571 return tem;
574 while (handled_component_p (*t))
575 t = &TREE_OPERAND (*t, 0);
577 /* Fold back MEM_REFs to reference trees. */
578 if (TREE_CODE (*t) == MEM_REF
579 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
580 && integer_zerop (TREE_OPERAND (*t, 1))
581 && (TREE_THIS_VOLATILE (*t)
582 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
583 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
584 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
585 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
586 /* We have to look out here to not drop a required conversion
587 from the rhs to the lhs if is_lhs, but we don't have the
588 rhs here to verify that. Thus require strict type
589 compatibility. */
590 && types_compatible_p (TREE_TYPE (*t),
591 TREE_TYPE (TREE_OPERAND
592 (TREE_OPERAND (*t, 0), 0))))
594 tree tem;
595 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
596 tem = maybe_fold_reference (expr, is_lhs);
597 if (tem)
598 return tem;
599 return expr;
601 /* Canonicalize MEM_REFs invariant address operand. */
602 else if (TREE_CODE (*t) == MEM_REF
603 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
604 && !DECL_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))
605 && !CONSTANT_CLASS_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
607 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
608 TREE_OPERAND (*t, 0),
609 TREE_OPERAND (*t, 1));
610 if (tem)
612 *t = tem;
613 tem = maybe_fold_reference (expr, is_lhs);
614 if (tem)
615 return tem;
616 return expr;
619 else if (TREE_CODE (*t) == TARGET_MEM_REF)
621 tree tem = maybe_fold_tmr (*t);
622 if (tem)
624 *t = tem;
625 tem = maybe_fold_reference (expr, is_lhs);
626 if (tem)
627 return tem;
628 return expr;
631 else if (!is_lhs
632 && DECL_P (*t))
634 tree tem = get_symbol_constant_value (*t);
635 if (tem
636 && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
638 *t = unshare_expr (tem);
639 tem = maybe_fold_reference (expr, is_lhs);
640 if (tem)
641 return tem;
642 return expr;
646 return NULL_TREE;
650 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
651 replacement rhs for the statement or NULL_TREE if no simplification
652 could be made. It is assumed that the operands have been previously
653 folded. */
655 static tree
656 fold_gimple_assign (gimple_stmt_iterator *si)
658 gimple stmt = gsi_stmt (*si);
659 enum tree_code subcode = gimple_assign_rhs_code (stmt);
660 location_t loc = gimple_location (stmt);
662 tree result = NULL_TREE;
664 switch (get_gimple_rhs_class (subcode))
666 case GIMPLE_SINGLE_RHS:
668 tree rhs = gimple_assign_rhs1 (stmt);
670 /* Try to fold a conditional expression. */
671 if (TREE_CODE (rhs) == COND_EXPR)
673 tree op0 = COND_EXPR_COND (rhs);
674 tree tem;
675 bool set = false;
676 location_t cond_loc = EXPR_LOCATION (rhs);
678 if (COMPARISON_CLASS_P (op0))
680 fold_defer_overflow_warnings ();
681 tem = fold_binary_loc (cond_loc,
682 TREE_CODE (op0), TREE_TYPE (op0),
683 TREE_OPERAND (op0, 0),
684 TREE_OPERAND (op0, 1));
685 /* This is actually a conditional expression, not a GIMPLE
686 conditional statement, however, the valid_gimple_rhs_p
687 test still applies. */
688 set = (tem && is_gimple_condexpr (tem)
689 && valid_gimple_rhs_p (tem));
690 fold_undefer_overflow_warnings (set, stmt, 0);
692 else if (is_gimple_min_invariant (op0))
694 tem = op0;
695 set = true;
697 else
698 return NULL_TREE;
700 if (set)
701 result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
702 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
705 else if (REFERENCE_CLASS_P (rhs))
706 return maybe_fold_reference (rhs, false);
708 else if (TREE_CODE (rhs) == ADDR_EXPR)
710 tree ref = TREE_OPERAND (rhs, 0);
711 tree tem = maybe_fold_reference (ref, true);
712 if (tem
713 && TREE_CODE (tem) == MEM_REF
714 && integer_zerop (TREE_OPERAND (tem, 1)))
715 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
716 else if (tem)
717 result = fold_convert (TREE_TYPE (rhs),
718 build_fold_addr_expr_loc (loc, tem));
719 else if (TREE_CODE (ref) == MEM_REF
720 && integer_zerop (TREE_OPERAND (ref, 1)))
721 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
724 else if (TREE_CODE (rhs) == CONSTRUCTOR
725 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
726 && (CONSTRUCTOR_NELTS (rhs)
727 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
729 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
730 unsigned i;
731 tree val;
733 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
734 if (TREE_CODE (val) != INTEGER_CST
735 && TREE_CODE (val) != REAL_CST
736 && TREE_CODE (val) != FIXED_CST)
737 return NULL_TREE;
739 return build_vector_from_ctor (TREE_TYPE (rhs),
740 CONSTRUCTOR_ELTS (rhs));
743 else if (DECL_P (rhs))
744 return unshare_expr (get_symbol_constant_value (rhs));
746 /* If we couldn't fold the RHS, hand over to the generic
747 fold routines. */
748 if (result == NULL_TREE)
749 result = fold (rhs);
751 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
752 that may have been added by fold, and "useless" type
753 conversions that might now be apparent due to propagation. */
754 STRIP_USELESS_TYPE_CONVERSION (result);
756 if (result != rhs && valid_gimple_rhs_p (result))
757 return result;
759 return NULL_TREE;
761 break;
763 case GIMPLE_UNARY_RHS:
765 tree rhs = gimple_assign_rhs1 (stmt);
767 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
768 if (result)
770 /* If the operation was a conversion do _not_ mark a
771 resulting constant with TREE_OVERFLOW if the original
772 constant was not. These conversions have implementation
773 defined behavior and retaining the TREE_OVERFLOW flag
774 here would confuse later passes such as VRP. */
775 if (CONVERT_EXPR_CODE_P (subcode)
776 && TREE_CODE (result) == INTEGER_CST
777 && TREE_CODE (rhs) == INTEGER_CST)
778 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
780 STRIP_USELESS_TYPE_CONVERSION (result);
781 if (valid_gimple_rhs_p (result))
782 return result;
784 else if (CONVERT_EXPR_CODE_P (subcode)
785 && POINTER_TYPE_P (gimple_expr_type (stmt))
786 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
788 tree type = gimple_expr_type (stmt);
789 tree t = maybe_fold_offset_to_address (loc,
790 gimple_assign_rhs1 (stmt),
791 integer_zero_node, type);
792 if (t)
793 return t;
796 break;
798 case GIMPLE_BINARY_RHS:
799 /* Try to fold pointer addition. */
800 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
802 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
803 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
805 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
806 if (!useless_type_conversion_p
807 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
808 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
810 result = maybe_fold_stmt_addition (gimple_location (stmt),
811 type,
812 gimple_assign_rhs1 (stmt),
813 gimple_assign_rhs2 (stmt));
816 if (!result)
817 result = fold_binary_loc (loc, subcode,
818 TREE_TYPE (gimple_assign_lhs (stmt)),
819 gimple_assign_rhs1 (stmt),
820 gimple_assign_rhs2 (stmt));
822 if (result)
824 STRIP_USELESS_TYPE_CONVERSION (result);
825 if (valid_gimple_rhs_p (result))
826 return result;
828 /* Fold might have produced non-GIMPLE, so if we trust it blindly
829 we lose canonicalization opportunities. Do not go again
830 through fold here though, or the same non-GIMPLE will be
831 produced. */
832 if (commutative_tree_code (subcode)
833 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
834 gimple_assign_rhs2 (stmt), false))
835 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
836 gimple_assign_rhs2 (stmt),
837 gimple_assign_rhs1 (stmt));
839 break;
841 case GIMPLE_TERNARY_RHS:
842 result = fold_ternary_loc (loc, subcode,
843 TREE_TYPE (gimple_assign_lhs (stmt)),
844 gimple_assign_rhs1 (stmt),
845 gimple_assign_rhs2 (stmt),
846 gimple_assign_rhs3 (stmt));
848 if (result)
850 STRIP_USELESS_TYPE_CONVERSION (result);
851 if (valid_gimple_rhs_p (result))
852 return result;
854 /* Fold might have produced non-GIMPLE, so if we trust it blindly
855 we lose canonicalization opportunities. Do not go again
856 through fold here though, or the same non-GIMPLE will be
857 produced. */
858 if (commutative_ternary_tree_code (subcode)
859 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
860 gimple_assign_rhs2 (stmt), false))
861 return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
862 gimple_assign_rhs2 (stmt),
863 gimple_assign_rhs1 (stmt),
864 gimple_assign_rhs3 (stmt));
866 break;
868 case GIMPLE_INVALID_RHS:
869 gcc_unreachable ();
872 return NULL_TREE;
875 /* Attempt to fold a conditional statement. Return true if any changes were
876 made. We only attempt to fold the condition expression, and do not perform
877 any transformation that would require alteration of the cfg. It is
878 assumed that the operands have been previously folded. */
880 static bool
881 fold_gimple_cond (gimple stmt)
883 tree result = fold_binary_loc (gimple_location (stmt),
884 gimple_cond_code (stmt),
885 boolean_type_node,
886 gimple_cond_lhs (stmt),
887 gimple_cond_rhs (stmt));
889 if (result)
891 STRIP_USELESS_TYPE_CONVERSION (result);
892 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
894 gimple_cond_set_condition_from_tree (stmt, result);
895 return true;
899 return false;
902 /* Convert EXPR into a GIMPLE value suitable for substitution on the
903 RHS of an assignment. Insert the necessary statements before
904 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
905 is replaced. If the call is expected to produces a result, then it
906 is replaced by an assignment of the new RHS to the result variable.
907 If the result is to be ignored, then the call is replaced by a
908 GIMPLE_NOP. A proper VDEF chain is retained by making the first
909 VUSE and the last VDEF of the whole sequence be the same as the replaced
910 statement and using new SSA names for stores in between. */
912 void
913 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
915 tree lhs;
916 tree tmp = NULL_TREE; /* Silence warning. */
917 gimple stmt, new_stmt;
918 gimple_stmt_iterator i;
919 gimple_seq stmts = gimple_seq_alloc();
920 struct gimplify_ctx gctx;
921 gimple last = NULL;
922 gimple laststore = NULL;
923 tree reaching_vuse;
925 stmt = gsi_stmt (*si_p);
927 gcc_assert (is_gimple_call (stmt));
929 lhs = gimple_call_lhs (stmt);
930 reaching_vuse = gimple_vuse (stmt);
932 push_gimplify_context (&gctx);
934 if (lhs == NULL_TREE)
936 gimplify_and_add (expr, &stmts);
937 /* We can end up with folding a memcpy of an empty class assignment
938 which gets optimized away by C++ gimplification. */
939 if (gimple_seq_empty_p (stmts))
941 if (gimple_in_ssa_p (cfun))
943 unlink_stmt_vdef (stmt);
944 release_defs (stmt);
946 gsi_remove (si_p, true);
947 return;
950 else
951 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
953 pop_gimplify_context (NULL);
955 if (gimple_has_location (stmt))
956 annotate_all_with_location (stmts, gimple_location (stmt));
958 /* The replacement can expose previously unreferenced variables. */
959 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
961 if (last)
963 gsi_insert_before (si_p, last, GSI_NEW_STMT);
964 gsi_next (si_p);
966 new_stmt = gsi_stmt (i);
967 if (gimple_in_ssa_p (cfun))
969 find_new_referenced_vars (new_stmt);
970 mark_symbols_for_renaming (new_stmt);
972 /* If the new statement has a VUSE, update it with exact SSA name we
973 know will reach this one. */
974 if (gimple_vuse (new_stmt))
976 /* If we've also seen a previous store create a new VDEF for
977 the latter one, and make that the new reaching VUSE. */
978 if (laststore)
980 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
981 gimple_set_vdef (laststore, reaching_vuse);
982 update_stmt (laststore);
983 laststore = NULL;
985 gimple_set_vuse (new_stmt, reaching_vuse);
986 gimple_set_modified (new_stmt, true);
988 if (gimple_assign_single_p (new_stmt)
989 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
991 laststore = new_stmt;
993 last = new_stmt;
996 if (lhs == NULL_TREE)
998 /* If we replace a call without LHS that has a VDEF and our new
999 sequence ends with a store we must make that store have the same
1000 vdef in order not to break the sequencing. This can happen
1001 for instance when folding memcpy calls into assignments. */
1002 if (gimple_vdef (stmt) && laststore)
1004 gimple_set_vdef (laststore, gimple_vdef (stmt));
1005 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1006 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
1007 update_stmt (laststore);
1009 else if (gimple_in_ssa_p (cfun))
1011 unlink_stmt_vdef (stmt);
1012 release_defs (stmt);
1014 new_stmt = last;
1016 else
1018 if (last)
1020 gsi_insert_before (si_p, last, GSI_NEW_STMT);
1021 gsi_next (si_p);
1023 if (laststore && is_gimple_reg (lhs))
1025 gimple_set_vdef (laststore, gimple_vdef (stmt));
1026 update_stmt (laststore);
1027 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1028 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
1029 laststore = NULL;
1031 else if (laststore)
1033 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
1034 gimple_set_vdef (laststore, reaching_vuse);
1035 update_stmt (laststore);
1036 laststore = NULL;
1038 new_stmt = gimple_build_assign (lhs, tmp);
1039 if (!is_gimple_reg (tmp))
1040 gimple_set_vuse (new_stmt, reaching_vuse);
1041 if (!is_gimple_reg (lhs))
1043 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1044 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1045 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
1047 else if (reaching_vuse == gimple_vuse (stmt))
1048 unlink_stmt_vdef (stmt);
1051 gimple_set_location (new_stmt, gimple_location (stmt));
1052 gsi_replace (si_p, new_stmt, false);
1055 /* Return the string length, maximum string length or maximum value of
1056 ARG in LENGTH.
1057 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1058 is not NULL and, for TYPE == 0, its value is not equal to the length
1059 we determine or if we are unable to determine the length or value,
1060 return false. VISITED is a bitmap of visited variables.
1061 TYPE is 0 if string length should be returned, 1 for maximum string
1062 length and 2 for maximum value ARG can have. */
1064 static bool
1065 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
1067 tree var, val;
1068 gimple def_stmt;
1070 if (TREE_CODE (arg) != SSA_NAME)
1072 if (TREE_CODE (arg) == COND_EXPR)
1073 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
1074 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
1075 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1076 else if (TREE_CODE (arg) == ADDR_EXPR
1077 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1078 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1080 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1081 if (TREE_CODE (aop0) == INDIRECT_REF
1082 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1083 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1084 length, visited, type);
1087 if (type == 2)
1089 val = arg;
1090 if (TREE_CODE (val) != INTEGER_CST
1091 || tree_int_cst_sgn (val) < 0)
1092 return false;
1094 else
1095 val = c_strlen (arg, 1);
1096 if (!val)
1097 return false;
1099 if (*length)
1101 if (type > 0)
1103 if (TREE_CODE (*length) != INTEGER_CST
1104 || TREE_CODE (val) != INTEGER_CST)
1105 return false;
1107 if (tree_int_cst_lt (*length, val))
1108 *length = val;
1109 return true;
1111 else if (simple_cst_equal (val, *length) != 1)
1112 return false;
1115 *length = val;
1116 return true;
1119 /* If we were already here, break the infinite cycle. */
1120 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
1121 return true;
1123 var = arg;
1124 def_stmt = SSA_NAME_DEF_STMT (var);
1126 switch (gimple_code (def_stmt))
1128 case GIMPLE_ASSIGN:
1129 /* The RHS of the statement defining VAR must either have a
1130 constant length or come from another SSA_NAME with a constant
1131 length. */
1132 if (gimple_assign_single_p (def_stmt)
1133 || gimple_assign_unary_nop_p (def_stmt))
1135 tree rhs = gimple_assign_rhs1 (def_stmt);
1136 return get_maxval_strlen (rhs, length, visited, type);
1138 return false;
1140 case GIMPLE_PHI:
1142 /* All the arguments of the PHI node must have the same constant
1143 length. */
1144 unsigned i;
1146 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1148 tree arg = gimple_phi_arg (def_stmt, i)->def;
1150 /* If this PHI has itself as an argument, we cannot
1151 determine the string length of this argument. However,
1152 if we can find a constant string length for the other
1153 PHI args then we can still be sure that this is a
1154 constant string length. So be optimistic and just
1155 continue with the next argument. */
1156 if (arg == gimple_phi_result (def_stmt))
1157 continue;
1159 if (!get_maxval_strlen (arg, length, visited, type))
1160 return false;
1163 return true;
1165 default:
1166 return false;
1171 /* Fold builtin call in statement STMT. Returns a simplified tree.
1172 We may return a non-constant expression, including another call
1173 to a different function and with different arguments, e.g.,
1174 substituting memcpy for strcpy when the string length is known.
1175 Note that some builtins expand into inline code that may not
1176 be valid in GIMPLE. Callers must take care. */
1178 tree
1179 gimple_fold_builtin (gimple stmt)
1181 tree result, val[3];
1182 tree callee, a;
1183 int arg_idx, type;
1184 bitmap visited;
1185 bool ignore;
1186 int nargs;
1187 location_t loc = gimple_location (stmt);
1189 gcc_assert (is_gimple_call (stmt));
1191 ignore = (gimple_call_lhs (stmt) == NULL);
1193 /* First try the generic builtin folder. If that succeeds, return the
1194 result directly. */
1195 result = fold_call_stmt (stmt, ignore);
1196 if (result)
1198 if (ignore)
1199 STRIP_NOPS (result);
1200 return result;
1203 /* Ignore MD builtins. */
1204 callee = gimple_call_fndecl (stmt);
1205 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
1206 return NULL_TREE;
1208 /* If the builtin could not be folded, and it has no argument list,
1209 we're done. */
1210 nargs = gimple_call_num_args (stmt);
1211 if (nargs == 0)
1212 return NULL_TREE;
1214 /* Limit the work only for builtins we know how to simplify. */
1215 switch (DECL_FUNCTION_CODE (callee))
1217 case BUILT_IN_STRLEN:
1218 case BUILT_IN_FPUTS:
1219 case BUILT_IN_FPUTS_UNLOCKED:
1220 arg_idx = 0;
1221 type = 0;
1222 break;
1223 case BUILT_IN_STRCPY:
1224 case BUILT_IN_STRNCPY:
1225 arg_idx = 1;
1226 type = 0;
1227 break;
1228 case BUILT_IN_MEMCPY_CHK:
1229 case BUILT_IN_MEMPCPY_CHK:
1230 case BUILT_IN_MEMMOVE_CHK:
1231 case BUILT_IN_MEMSET_CHK:
1232 case BUILT_IN_STRNCPY_CHK:
1233 arg_idx = 2;
1234 type = 2;
1235 break;
1236 case BUILT_IN_STRCPY_CHK:
1237 case BUILT_IN_STPCPY_CHK:
1238 arg_idx = 1;
1239 type = 1;
1240 break;
1241 case BUILT_IN_SNPRINTF_CHK:
1242 case BUILT_IN_VSNPRINTF_CHK:
1243 arg_idx = 1;
1244 type = 2;
1245 break;
1246 default:
1247 return NULL_TREE;
1250 if (arg_idx >= nargs)
1251 return NULL_TREE;
1253 /* Try to use the dataflow information gathered by the CCP process. */
1254 visited = BITMAP_ALLOC (NULL);
1255 bitmap_clear (visited);
1257 memset (val, 0, sizeof (val));
1258 a = gimple_call_arg (stmt, arg_idx);
1259 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
1260 val[arg_idx] = NULL_TREE;
1262 BITMAP_FREE (visited);
1264 result = NULL_TREE;
1265 switch (DECL_FUNCTION_CODE (callee))
1267 case BUILT_IN_STRLEN:
1268 if (val[0] && nargs == 1)
1270 tree new_val =
1271 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
1273 /* If the result is not a valid gimple value, or not a cast
1274 of a valid gimple value, then we cannot use the result. */
1275 if (is_gimple_val (new_val)
1276 || (is_gimple_cast (new_val)
1277 && is_gimple_val (TREE_OPERAND (new_val, 0))))
1278 return new_val;
1280 break;
1282 case BUILT_IN_STRCPY:
1283 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
1284 result = fold_builtin_strcpy (loc, callee,
1285 gimple_call_arg (stmt, 0),
1286 gimple_call_arg (stmt, 1),
1287 val[1]);
1288 break;
1290 case BUILT_IN_STRNCPY:
1291 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1292 result = fold_builtin_strncpy (loc, callee,
1293 gimple_call_arg (stmt, 0),
1294 gimple_call_arg (stmt, 1),
1295 gimple_call_arg (stmt, 2),
1296 val[1]);
1297 break;
1299 case BUILT_IN_FPUTS:
1300 if (nargs == 2)
1301 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1302 gimple_call_arg (stmt, 1),
1303 ignore, false, val[0]);
1304 break;
1306 case BUILT_IN_FPUTS_UNLOCKED:
1307 if (nargs == 2)
1308 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1309 gimple_call_arg (stmt, 1),
1310 ignore, true, val[0]);
1311 break;
1313 case BUILT_IN_MEMCPY_CHK:
1314 case BUILT_IN_MEMPCPY_CHK:
1315 case BUILT_IN_MEMMOVE_CHK:
1316 case BUILT_IN_MEMSET_CHK:
1317 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1318 result = fold_builtin_memory_chk (loc, callee,
1319 gimple_call_arg (stmt, 0),
1320 gimple_call_arg (stmt, 1),
1321 gimple_call_arg (stmt, 2),
1322 gimple_call_arg (stmt, 3),
1323 val[2], ignore,
1324 DECL_FUNCTION_CODE (callee));
1325 break;
1327 case BUILT_IN_STRCPY_CHK:
1328 case BUILT_IN_STPCPY_CHK:
1329 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1330 result = fold_builtin_stxcpy_chk (loc, callee,
1331 gimple_call_arg (stmt, 0),
1332 gimple_call_arg (stmt, 1),
1333 gimple_call_arg (stmt, 2),
1334 val[1], ignore,
1335 DECL_FUNCTION_CODE (callee));
1336 break;
1338 case BUILT_IN_STRNCPY_CHK:
1339 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1340 result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
1341 gimple_call_arg (stmt, 1),
1342 gimple_call_arg (stmt, 2),
1343 gimple_call_arg (stmt, 3),
1344 val[2]);
1345 break;
1347 case BUILT_IN_SNPRINTF_CHK:
1348 case BUILT_IN_VSNPRINTF_CHK:
1349 if (val[1] && is_gimple_val (val[1]))
1350 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1351 DECL_FUNCTION_CODE (callee));
1352 break;
1354 default:
1355 gcc_unreachable ();
1358 if (result && ignore)
1359 result = fold_ignored_result (result);
1360 return result;
1363 /* Search for a base binfo of BINFO that corresponds to TYPE and return it if
1364 it is found or NULL_TREE if it is not. */
1366 static tree
1367 get_base_binfo_for_type (tree binfo, tree type)
1369 int i;
1370 tree base_binfo;
1371 tree res = NULL_TREE;
1373 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1374 if (TREE_TYPE (base_binfo) == type)
1376 gcc_assert (!res);
1377 res = base_binfo;
1380 return res;
1383 /* Return a binfo describing the part of object referenced by expression REF.
1384 Return NULL_TREE if it cannot be determined. REF can consist of a series of
1385 COMPONENT_REFs of a declaration or of an INDIRECT_REF or it can also be just
1386 a simple declaration, indirect reference or an SSA_NAME. If the function
1387 discovers an INDIRECT_REF or an SSA_NAME, it will assume that the
1388 encapsulating type is described by KNOWN_BINFO, if it is not NULL_TREE.
1389 Otherwise the first non-artificial field declaration or the base declaration
1390 will be examined to get the encapsulating type. */
1392 tree
1393 gimple_get_relevant_ref_binfo (tree ref, tree known_binfo)
1395 while (true)
1397 if (TREE_CODE (ref) == COMPONENT_REF)
1399 tree par_type;
1400 tree binfo;
1401 tree field = TREE_OPERAND (ref, 1);
1403 if (!DECL_ARTIFICIAL (field))
1405 tree type = TREE_TYPE (field);
1406 if (TREE_CODE (type) == RECORD_TYPE)
1407 return TYPE_BINFO (type);
1408 else
1409 return NULL_TREE;
1412 par_type = TREE_TYPE (TREE_OPERAND (ref, 0));
1413 binfo = TYPE_BINFO (par_type);
1414 if (!binfo
1415 || BINFO_N_BASE_BINFOS (binfo) == 0)
1416 return NULL_TREE;
1418 /* Offset 0 indicates the primary base, whose vtable contents are
1419 represented in the binfo for the derived class. */
1420 if (int_bit_position (field) != 0)
1422 tree d_binfo;
1424 /* Get descendant binfo. */
1425 d_binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (ref, 0),
1426 known_binfo);
1427 if (!d_binfo)
1428 return NULL_TREE;
1429 return get_base_binfo_for_type (d_binfo, TREE_TYPE (field));
1432 ref = TREE_OPERAND (ref, 0);
1434 else if (DECL_P (ref) && TREE_CODE (TREE_TYPE (ref)) == RECORD_TYPE)
1435 return TYPE_BINFO (TREE_TYPE (ref));
1436 else if (known_binfo
1437 && (TREE_CODE (ref) == SSA_NAME
1438 || TREE_CODE (ref) == MEM_REF))
1439 return known_binfo;
1440 else
1441 return NULL_TREE;
1445 /* Fold a OBJ_TYPE_REF expression to the address of a function. TOKEN is
1446 integer form of OBJ_TYPE_REF_TOKEN of the reference expression. KNOWN_BINFO
1447 carries the binfo describing the true type of OBJ_TYPE_REF_OBJECT(REF). */
1449 tree
1450 gimple_fold_obj_type_ref_known_binfo (HOST_WIDE_INT token, tree known_binfo)
1452 HOST_WIDE_INT i;
1453 tree v, fndecl, delta;
1455 v = BINFO_VIRTUALS (known_binfo);
1456 i = 0;
1457 while (i != token)
1459 i += (TARGET_VTABLE_USES_DESCRIPTORS
1460 ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
1461 v = TREE_CHAIN (v);
1464 fndecl = TREE_VALUE (v);
1465 delta = TREE_PURPOSE (v);
1466 gcc_assert (host_integerp (delta, 0));
1468 if (integer_nonzerop (delta))
1470 struct cgraph_node *node = cgraph_get_node (fndecl);
1471 HOST_WIDE_INT off = tree_low_cst (delta, 0);
1473 if (!node)
1474 return NULL;
1475 for (node = node->same_body; node; node = node->next)
1476 if (node->thunk.thunk_p && off == node->thunk.fixed_offset)
1477 break;
1478 if (node)
1479 fndecl = node->decl;
1480 else
1481 return NULL;
1484 /* When cgraph node is missing and function is not public, we cannot
1485 devirtualize. This can happen in WHOPR when the actual method
1486 ends up in other partition, because we found devirtualization
1487 possibility too late. */
1488 if (!can_refer_decl_in_current_unit_p (fndecl))
1489 return NULL;
1490 return build_fold_addr_expr (fndecl);
1494 /* Fold a OBJ_TYPE_REF expression to the address of a function. If KNOWN_TYPE
1495 is not NULL_TREE, it is the true type of the outmost encapsulating object if
1496 that comes from a pointer SSA_NAME. If the true outmost encapsulating type
1497 can be determined from a declaration OBJ_TYPE_REF_OBJECT(REF), it is used
1498 regardless of KNOWN_TYPE (which thus can be NULL_TREE). */
1500 tree
1501 gimple_fold_obj_type_ref (tree ref, tree known_type)
1503 tree obj = OBJ_TYPE_REF_OBJECT (ref);
1504 tree known_binfo = known_type ? TYPE_BINFO (known_type) : NULL_TREE;
1505 tree binfo;
1507 if (TREE_CODE (obj) == ADDR_EXPR)
1508 obj = TREE_OPERAND (obj, 0);
1510 binfo = gimple_get_relevant_ref_binfo (obj, known_binfo);
1511 if (binfo)
1513 HOST_WIDE_INT token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
1514 /* If there is no virtual methods leave the OBJ_TYPE_REF alone. */
1515 if (!BINFO_VIRTUALS (binfo))
1516 return NULL_TREE;
1517 return gimple_fold_obj_type_ref_known_binfo (token, binfo);
1519 else
1520 return NULL_TREE;
1523 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1524 The statement may be replaced by another statement, e.g., if the call
1525 simplifies to a constant value. Return true if any changes were made.
1526 It is assumed that the operands have been previously folded. */
1528 static bool
1529 fold_gimple_call (gimple_stmt_iterator *gsi, bool inplace)
1531 gimple stmt = gsi_stmt (*gsi);
1533 tree callee = gimple_call_fndecl (stmt);
1535 /* Check for builtins that CCP can handle using information not
1536 available in the generic fold routines. */
1537 if (!inplace && callee && DECL_BUILT_IN (callee))
1539 tree result = gimple_fold_builtin (stmt);
1541 if (result)
1543 if (!update_call_from_tree (gsi, result))
1544 gimplify_and_update_call_from_tree (gsi, result);
1545 return true;
1548 else
1550 /* ??? Should perhaps do this in fold proper. However, doing it
1551 there requires that we create a new CALL_EXPR, and that requires
1552 copying EH region info to the new node. Easier to just do it
1553 here where we can just smash the call operand. */
1554 callee = gimple_call_fn (stmt);
1555 if (TREE_CODE (callee) == OBJ_TYPE_REF
1556 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR)
1558 tree t;
1560 t = gimple_fold_obj_type_ref (callee, NULL_TREE);
1561 if (t)
1563 gimple_call_set_fn (stmt, t);
1564 return true;
1569 return false;
1572 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1573 distinguishes both cases. */
1575 static bool
1576 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1578 bool changed = false;
1579 gimple stmt = gsi_stmt (*gsi);
1580 unsigned i;
1582 /* Fold the main computation performed by the statement. */
1583 switch (gimple_code (stmt))
1585 case GIMPLE_ASSIGN:
1587 unsigned old_num_ops = gimple_num_ops (stmt);
1588 tree new_rhs = fold_gimple_assign (gsi);
1589 tree lhs = gimple_assign_lhs (stmt);
1590 if (new_rhs
1591 && !useless_type_conversion_p (TREE_TYPE (lhs),
1592 TREE_TYPE (new_rhs)))
1593 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1594 if (new_rhs
1595 && (!inplace
1596 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1598 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1599 changed = true;
1601 break;
1604 case GIMPLE_COND:
1605 changed |= fold_gimple_cond (stmt);
1606 break;
1608 case GIMPLE_CALL:
1609 /* Fold *& in call arguments. */
1610 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1611 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1613 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1614 if (tmp)
1616 gimple_call_set_arg (stmt, i, tmp);
1617 changed = true;
1620 changed |= fold_gimple_call (gsi, inplace);
1621 break;
1623 case GIMPLE_ASM:
1624 /* Fold *& in asm operands. */
1625 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1627 tree link = gimple_asm_output_op (stmt, i);
1628 tree op = TREE_VALUE (link);
1629 if (REFERENCE_CLASS_P (op)
1630 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1632 TREE_VALUE (link) = op;
1633 changed = true;
1636 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1638 tree link = gimple_asm_input_op (stmt, i);
1639 tree op = TREE_VALUE (link);
1640 if (REFERENCE_CLASS_P (op)
1641 && (op = maybe_fold_reference (op, false)) != NULL_TREE)
1643 TREE_VALUE (link) = op;
1644 changed = true;
1647 break;
1649 case GIMPLE_DEBUG:
1650 if (gimple_debug_bind_p (stmt))
1652 tree val = gimple_debug_bind_get_value (stmt);
1653 if (val
1654 && REFERENCE_CLASS_P (val))
1656 tree tem = maybe_fold_reference (val, false);
1657 if (tem)
1659 gimple_debug_bind_set_value (stmt, tem);
1660 changed = true;
1664 break;
1666 default:;
1669 stmt = gsi_stmt (*gsi);
1671 /* Fold *& on the lhs. */
1672 if (gimple_has_lhs (stmt))
1674 tree lhs = gimple_get_lhs (stmt);
1675 if (lhs && REFERENCE_CLASS_P (lhs))
1677 tree new_lhs = maybe_fold_reference (lhs, true);
1678 if (new_lhs)
1680 gimple_set_lhs (stmt, new_lhs);
1681 changed = true;
1686 return changed;
1689 /* Fold the statement pointed to by GSI. In some cases, this function may
1690 replace the whole statement with a new one. Returns true iff folding
1691 makes any changes.
1692 The statement pointed to by GSI should be in valid gimple form but may
1693 be in unfolded state as resulting from for example constant propagation
1694 which can produce *&x = 0. */
1696 bool
1697 fold_stmt (gimple_stmt_iterator *gsi)
1699 return fold_stmt_1 (gsi, false);
1702 /* Perform the minimal folding on statement STMT. Only operations like
1703 *&x created by constant propagation are handled. The statement cannot
1704 be replaced with a new one. Return true if the statement was
1705 changed, false otherwise.
1706 The statement STMT should be in valid gimple form but may
1707 be in unfolded state as resulting from for example constant propagation
1708 which can produce *&x = 0. */
1710 bool
1711 fold_stmt_inplace (gimple stmt)
1713 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1714 bool changed = fold_stmt_1 (&gsi, true);
1715 gcc_assert (gsi_stmt (gsi) == stmt);
1716 return changed;
1719 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1720 if EXPR is null or we don't know how.
1721 If non-null, the result always has boolean type. */
1723 static tree
1724 canonicalize_bool (tree expr, bool invert)
1726 if (!expr)
1727 return NULL_TREE;
1728 else if (invert)
1730 if (integer_nonzerop (expr))
1731 return boolean_false_node;
1732 else if (integer_zerop (expr))
1733 return boolean_true_node;
1734 else if (TREE_CODE (expr) == SSA_NAME)
1735 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1736 build_int_cst (TREE_TYPE (expr), 0));
1737 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1738 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1739 boolean_type_node,
1740 TREE_OPERAND (expr, 0),
1741 TREE_OPERAND (expr, 1));
1742 else
1743 return NULL_TREE;
1745 else
1747 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1748 return expr;
1749 if (integer_nonzerop (expr))
1750 return boolean_true_node;
1751 else if (integer_zerop (expr))
1752 return boolean_false_node;
1753 else if (TREE_CODE (expr) == SSA_NAME)
1754 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1755 build_int_cst (TREE_TYPE (expr), 0));
1756 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1757 return fold_build2 (TREE_CODE (expr),
1758 boolean_type_node,
1759 TREE_OPERAND (expr, 0),
1760 TREE_OPERAND (expr, 1));
1761 else
1762 return NULL_TREE;
1766 /* Check to see if a boolean expression EXPR is logically equivalent to the
1767 comparison (OP1 CODE OP2). Check for various identities involving
1768 SSA_NAMEs. */
1770 static bool
1771 same_bool_comparison_p (const_tree expr, enum tree_code code,
1772 const_tree op1, const_tree op2)
1774 gimple s;
1776 /* The obvious case. */
1777 if (TREE_CODE (expr) == code
1778 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1779 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1780 return true;
1782 /* Check for comparing (name, name != 0) and the case where expr
1783 is an SSA_NAME with a definition matching the comparison. */
1784 if (TREE_CODE (expr) == SSA_NAME
1785 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1787 if (operand_equal_p (expr, op1, 0))
1788 return ((code == NE_EXPR && integer_zerop (op2))
1789 || (code == EQ_EXPR && integer_nonzerop (op2)));
1790 s = SSA_NAME_DEF_STMT (expr);
1791 if (is_gimple_assign (s)
1792 && gimple_assign_rhs_code (s) == code
1793 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1794 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1795 return true;
1798 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1799 of name is a comparison, recurse. */
1800 if (TREE_CODE (op1) == SSA_NAME
1801 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1803 s = SSA_NAME_DEF_STMT (op1);
1804 if (is_gimple_assign (s)
1805 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1807 enum tree_code c = gimple_assign_rhs_code (s);
1808 if ((c == NE_EXPR && integer_zerop (op2))
1809 || (c == EQ_EXPR && integer_nonzerop (op2)))
1810 return same_bool_comparison_p (expr, c,
1811 gimple_assign_rhs1 (s),
1812 gimple_assign_rhs2 (s));
1813 if ((c == EQ_EXPR && integer_zerop (op2))
1814 || (c == NE_EXPR && integer_nonzerop (op2)))
1815 return same_bool_comparison_p (expr,
1816 invert_tree_comparison (c, false),
1817 gimple_assign_rhs1 (s),
1818 gimple_assign_rhs2 (s));
1821 return false;
1824 /* Check to see if two boolean expressions OP1 and OP2 are logically
1825 equivalent. */
1827 static bool
1828 same_bool_result_p (const_tree op1, const_tree op2)
1830 /* Simple cases first. */
1831 if (operand_equal_p (op1, op2, 0))
1832 return true;
1834 /* Check the cases where at least one of the operands is a comparison.
1835 These are a bit smarter than operand_equal_p in that they apply some
1836 identifies on SSA_NAMEs. */
1837 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1838 && same_bool_comparison_p (op1, TREE_CODE (op2),
1839 TREE_OPERAND (op2, 0),
1840 TREE_OPERAND (op2, 1)))
1841 return true;
1842 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1843 && same_bool_comparison_p (op2, TREE_CODE (op1),
1844 TREE_OPERAND (op1, 0),
1845 TREE_OPERAND (op1, 1)))
1846 return true;
1848 /* Default case. */
1849 return false;
1852 /* Forward declarations for some mutually recursive functions. */
1854 static tree
1855 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1856 enum tree_code code2, tree op2a, tree op2b);
1857 static tree
1858 and_var_with_comparison (tree var, bool invert,
1859 enum tree_code code2, tree op2a, tree op2b);
1860 static tree
1861 and_var_with_comparison_1 (gimple stmt,
1862 enum tree_code code2, tree op2a, tree op2b);
1863 static tree
1864 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1865 enum tree_code code2, tree op2a, tree op2b);
1866 static tree
1867 or_var_with_comparison (tree var, bool invert,
1868 enum tree_code code2, tree op2a, tree op2b);
1869 static tree
1870 or_var_with_comparison_1 (gimple stmt,
1871 enum tree_code code2, tree op2a, tree op2b);
1873 /* Helper function for and_comparisons_1: try to simplify the AND of the
1874 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1875 If INVERT is true, invert the value of the VAR before doing the AND.
1876 Return NULL_EXPR if we can't simplify this to a single expression. */
1878 static tree
1879 and_var_with_comparison (tree var, bool invert,
1880 enum tree_code code2, tree op2a, tree op2b)
1882 tree t;
1883 gimple stmt = SSA_NAME_DEF_STMT (var);
1885 /* We can only deal with variables whose definitions are assignments. */
1886 if (!is_gimple_assign (stmt))
1887 return NULL_TREE;
1889 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1890 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1891 Then we only have to consider the simpler non-inverted cases. */
1892 if (invert)
1893 t = or_var_with_comparison_1 (stmt,
1894 invert_tree_comparison (code2, false),
1895 op2a, op2b);
1896 else
1897 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1898 return canonicalize_bool (t, invert);
1901 /* Try to simplify the AND of the ssa variable defined by the assignment
1902 STMT with the comparison specified by (OP2A CODE2 OP2B).
1903 Return NULL_EXPR if we can't simplify this to a single expression. */
1905 static tree
1906 and_var_with_comparison_1 (gimple stmt,
1907 enum tree_code code2, tree op2a, tree op2b)
1909 tree var = gimple_assign_lhs (stmt);
1910 tree true_test_var = NULL_TREE;
1911 tree false_test_var = NULL_TREE;
1912 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1914 /* Check for identities like (var AND (var == 0)) => false. */
1915 if (TREE_CODE (op2a) == SSA_NAME
1916 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1918 if ((code2 == NE_EXPR && integer_zerop (op2b))
1919 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1921 true_test_var = op2a;
1922 if (var == true_test_var)
1923 return var;
1925 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1926 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1928 false_test_var = op2a;
1929 if (var == false_test_var)
1930 return boolean_false_node;
1934 /* If the definition is a comparison, recurse on it. */
1935 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1937 tree t = and_comparisons_1 (innercode,
1938 gimple_assign_rhs1 (stmt),
1939 gimple_assign_rhs2 (stmt),
1940 code2,
1941 op2a,
1942 op2b);
1943 if (t)
1944 return t;
1947 /* If the definition is an AND or OR expression, we may be able to
1948 simplify by reassociating. */
1949 if (innercode == TRUTH_AND_EXPR
1950 || innercode == TRUTH_OR_EXPR
1951 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1952 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
1954 tree inner1 = gimple_assign_rhs1 (stmt);
1955 tree inner2 = gimple_assign_rhs2 (stmt);
1956 gimple s;
1957 tree t;
1958 tree partial = NULL_TREE;
1959 bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);
1961 /* Check for boolean identities that don't require recursive examination
1962 of inner1/inner2:
1963 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1964 inner1 AND (inner1 OR inner2) => inner1
1965 !inner1 AND (inner1 AND inner2) => false
1966 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1967 Likewise for similar cases involving inner2. */
1968 if (inner1 == true_test_var)
1969 return (is_and ? var : inner1);
1970 else if (inner2 == true_test_var)
1971 return (is_and ? var : inner2);
1972 else if (inner1 == false_test_var)
1973 return (is_and
1974 ? boolean_false_node
1975 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1976 else if (inner2 == false_test_var)
1977 return (is_and
1978 ? boolean_false_node
1979 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1981 /* Next, redistribute/reassociate the AND across the inner tests.
1982 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1983 if (TREE_CODE (inner1) == SSA_NAME
1984 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1985 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1986 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1987 gimple_assign_rhs1 (s),
1988 gimple_assign_rhs2 (s),
1989 code2, op2a, op2b)))
1991 /* Handle the AND case, where we are reassociating:
1992 (inner1 AND inner2) AND (op2a code2 op2b)
1993 => (t AND inner2)
1994 If the partial result t is a constant, we win. Otherwise
1995 continue on to try reassociating with the other inner test. */
1996 if (is_and)
1998 if (integer_onep (t))
1999 return inner2;
2000 else if (integer_zerop (t))
2001 return boolean_false_node;
2004 /* Handle the OR case, where we are redistributing:
2005 (inner1 OR inner2) AND (op2a code2 op2b)
2006 => (t OR (inner2 AND (op2a code2 op2b))) */
2007 else
2009 if (integer_onep (t))
2010 return boolean_true_node;
2011 else
2012 /* Save partial result for later. */
2013 partial = t;
2017 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
2018 if (TREE_CODE (inner2) == SSA_NAME
2019 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2020 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2021 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
2022 gimple_assign_rhs1 (s),
2023 gimple_assign_rhs2 (s),
2024 code2, op2a, op2b)))
2026 /* Handle the AND case, where we are reassociating:
2027 (inner1 AND inner2) AND (op2a code2 op2b)
2028 => (inner1 AND t) */
2029 if (is_and)
2031 if (integer_onep (t))
2032 return inner1;
2033 else if (integer_zerop (t))
2034 return boolean_false_node;
2037 /* Handle the OR case. where we are redistributing:
2038 (inner1 OR inner2) AND (op2a code2 op2b)
2039 => (t OR (inner1 AND (op2a code2 op2b)))
2040 => (t OR partial) */
2041 else
2043 if (integer_onep (t))
2044 return boolean_true_node;
2045 else if (partial)
2047 /* We already got a simplification for the other
2048 operand to the redistributed OR expression. The
2049 interesting case is when at least one is false.
2050 Or, if both are the same, we can apply the identity
2051 (x OR x) == x. */
2052 if (integer_zerop (partial))
2053 return t;
2054 else if (integer_zerop (t))
2055 return partial;
2056 else if (same_bool_result_p (t, partial))
2057 return t;
2062 return NULL_TREE;
2065 /* Try to simplify the AND of two comparisons defined by
2066 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2067 If this can be done without constructing an intermediate value,
2068 return the resulting tree; otherwise NULL_TREE is returned.
2069 This function is deliberately asymmetric as it recurses on SSA_DEFs
2070 in the first comparison but not the second. */
2072 static tree
2073 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2074 enum tree_code code2, tree op2a, tree op2b)
2076 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
2077 if (operand_equal_p (op1a, op2a, 0)
2078 && operand_equal_p (op1b, op2b, 0))
2080 tree t = combine_comparisons (UNKNOWN_LOCATION,
2081 TRUTH_ANDIF_EXPR, code1, code2,
2082 boolean_type_node, op1a, op1b);
2083 if (t)
2084 return t;
2087 /* Likewise the swapped case of the above. */
2088 if (operand_equal_p (op1a, op2b, 0)
2089 && operand_equal_p (op1b, op2a, 0))
2091 tree t = combine_comparisons (UNKNOWN_LOCATION,
2092 TRUTH_ANDIF_EXPR, code1,
2093 swap_tree_comparison (code2),
2094 boolean_type_node, op1a, op1b);
2095 if (t)
2096 return t;
2099 /* If both comparisons are of the same value against constants, we might
2100 be able to merge them. */
2101 if (operand_equal_p (op1a, op2a, 0)
2102 && TREE_CODE (op1b) == INTEGER_CST
2103 && TREE_CODE (op2b) == INTEGER_CST)
2105 int cmp = tree_int_cst_compare (op1b, op2b);
2107 /* If we have (op1a == op1b), we should either be able to
2108 return that or FALSE, depending on whether the constant op1b
2109 also satisfies the other comparison against op2b. */
2110 if (code1 == EQ_EXPR)
2112 bool done = true;
2113 bool val;
2114 switch (code2)
2116 case EQ_EXPR: val = (cmp == 0); break;
2117 case NE_EXPR: val = (cmp != 0); break;
2118 case LT_EXPR: val = (cmp < 0); break;
2119 case GT_EXPR: val = (cmp > 0); break;
2120 case LE_EXPR: val = (cmp <= 0); break;
2121 case GE_EXPR: val = (cmp >= 0); break;
2122 default: done = false;
2124 if (done)
2126 if (val)
2127 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2128 else
2129 return boolean_false_node;
2132 /* Likewise if the second comparison is an == comparison. */
2133 else if (code2 == EQ_EXPR)
2135 bool done = true;
2136 bool val;
2137 switch (code1)
2139 case EQ_EXPR: val = (cmp == 0); break;
2140 case NE_EXPR: val = (cmp != 0); break;
2141 case LT_EXPR: val = (cmp > 0); break;
2142 case GT_EXPR: val = (cmp < 0); break;
2143 case LE_EXPR: val = (cmp >= 0); break;
2144 case GE_EXPR: val = (cmp <= 0); break;
2145 default: done = false;
2147 if (done)
2149 if (val)
2150 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2151 else
2152 return boolean_false_node;
2156 /* Same business with inequality tests. */
2157 else if (code1 == NE_EXPR)
2159 bool val;
2160 switch (code2)
2162 case EQ_EXPR: val = (cmp != 0); break;
2163 case NE_EXPR: val = (cmp == 0); break;
2164 case LT_EXPR: val = (cmp >= 0); break;
2165 case GT_EXPR: val = (cmp <= 0); break;
2166 case LE_EXPR: val = (cmp > 0); break;
2167 case GE_EXPR: val = (cmp < 0); break;
2168 default:
2169 val = false;
2171 if (val)
2172 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2174 else if (code2 == NE_EXPR)
2176 bool val;
2177 switch (code1)
2179 case EQ_EXPR: val = (cmp == 0); break;
2180 case NE_EXPR: val = (cmp != 0); break;
2181 case LT_EXPR: val = (cmp <= 0); break;
2182 case GT_EXPR: val = (cmp >= 0); break;
2183 case LE_EXPR: val = (cmp < 0); break;
2184 case GE_EXPR: val = (cmp > 0); break;
2185 default:
2186 val = false;
2188 if (val)
2189 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2192 /* Chose the more restrictive of two < or <= comparisons. */
2193 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2194 && (code2 == LT_EXPR || code2 == LE_EXPR))
2196 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2197 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2198 else
2199 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2202 /* Likewise chose the more restrictive of two > or >= comparisons. */
2203 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2204 && (code2 == GT_EXPR || code2 == GE_EXPR))
2206 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2207 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2208 else
2209 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2212 /* Check for singleton ranges. */
2213 else if (cmp == 0
2214 && ((code1 == LE_EXPR && code2 == GE_EXPR)
2215 || (code1 == GE_EXPR && code2 == LE_EXPR)))
2216 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
2218 /* Check for disjoint ranges. */
2219 else if (cmp <= 0
2220 && (code1 == LT_EXPR || code1 == LE_EXPR)
2221 && (code2 == GT_EXPR || code2 == GE_EXPR))
2222 return boolean_false_node;
2223 else if (cmp >= 0
2224 && (code1 == GT_EXPR || code1 == GE_EXPR)
2225 && (code2 == LT_EXPR || code2 == LE_EXPR))
2226 return boolean_false_node;
2229 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2230 NAME's definition is a truth value. See if there are any simplifications
2231 that can be done against the NAME's definition. */
2232 if (TREE_CODE (op1a) == SSA_NAME
2233 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2234 && (integer_zerop (op1b) || integer_onep (op1b)))
2236 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2237 || (code1 == NE_EXPR && integer_onep (op1b)));
2238 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2239 switch (gimple_code (stmt))
2241 case GIMPLE_ASSIGN:
2242 /* Try to simplify by copy-propagating the definition. */
2243 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
2245 case GIMPLE_PHI:
2246 /* If every argument to the PHI produces the same result when
2247 ANDed with the second comparison, we win.
2248 Do not do this unless the type is bool since we need a bool
2249 result here anyway. */
2250 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2252 tree result = NULL_TREE;
2253 unsigned i;
2254 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2256 tree arg = gimple_phi_arg_def (stmt, i);
2258 /* If this PHI has itself as an argument, ignore it.
2259 If all the other args produce the same result,
2260 we're still OK. */
2261 if (arg == gimple_phi_result (stmt))
2262 continue;
2263 else if (TREE_CODE (arg) == INTEGER_CST)
2265 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
2267 if (!result)
2268 result = boolean_false_node;
2269 else if (!integer_zerop (result))
2270 return NULL_TREE;
2272 else if (!result)
2273 result = fold_build2 (code2, boolean_type_node,
2274 op2a, op2b);
2275 else if (!same_bool_comparison_p (result,
2276 code2, op2a, op2b))
2277 return NULL_TREE;
2279 else if (TREE_CODE (arg) == SSA_NAME)
2281 tree temp = and_var_with_comparison (arg, invert,
2282 code2, op2a, op2b);
2283 if (!temp)
2284 return NULL_TREE;
2285 else if (!result)
2286 result = temp;
2287 else if (!same_bool_result_p (result, temp))
2288 return NULL_TREE;
2290 else
2291 return NULL_TREE;
2293 return result;
2296 default:
2297 break;
2300 return NULL_TREE;
2303 /* Try to simplify the AND of two comparisons, specified by
2304 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2305 If this can be simplified to a single expression (without requiring
2306 introducing more SSA variables to hold intermediate values),
2307 return the resulting tree. Otherwise return NULL_TREE.
2308 If the result expression is non-null, it has boolean type. */
2310 tree
2311 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2312 enum tree_code code2, tree op2a, tree op2b)
2314 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2315 if (t)
2316 return t;
2317 else
2318 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2321 /* Helper function for or_comparisons_1: try to simplify the OR of the
2322 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2323 If INVERT is true, invert the value of VAR before doing the OR.
2324 Return NULL_EXPR if we can't simplify this to a single expression. */
2326 static tree
2327 or_var_with_comparison (tree var, bool invert,
2328 enum tree_code code2, tree op2a, tree op2b)
2330 tree t;
2331 gimple stmt = SSA_NAME_DEF_STMT (var);
2333 /* We can only deal with variables whose definitions are assignments. */
2334 if (!is_gimple_assign (stmt))
2335 return NULL_TREE;
2337 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2338 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2339 Then we only have to consider the simpler non-inverted cases. */
2340 if (invert)
2341 t = and_var_with_comparison_1 (stmt,
2342 invert_tree_comparison (code2, false),
2343 op2a, op2b);
2344 else
2345 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2346 return canonicalize_bool (t, invert);
2349 /* Try to simplify the OR of the ssa variable defined by the assignment
2350 STMT with the comparison specified by (OP2A CODE2 OP2B).
2351 Return NULL_EXPR if we can't simplify this to a single expression. */
2353 static tree
2354 or_var_with_comparison_1 (gimple stmt,
2355 enum tree_code code2, tree op2a, tree op2b)
2357 tree var = gimple_assign_lhs (stmt);
2358 tree true_test_var = NULL_TREE;
2359 tree false_test_var = NULL_TREE;
2360 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2362 /* Check for identities like (var OR (var != 0)) => true . */
2363 if (TREE_CODE (op2a) == SSA_NAME
2364 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2366 if ((code2 == NE_EXPR && integer_zerop (op2b))
2367 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2369 true_test_var = op2a;
2370 if (var == true_test_var)
2371 return var;
2373 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2374 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2376 false_test_var = op2a;
2377 if (var == false_test_var)
2378 return boolean_true_node;
2382 /* If the definition is a comparison, recurse on it. */
2383 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2385 tree t = or_comparisons_1 (innercode,
2386 gimple_assign_rhs1 (stmt),
2387 gimple_assign_rhs2 (stmt),
2388 code2,
2389 op2a,
2390 op2b);
2391 if (t)
2392 return t;
2395 /* If the definition is an AND or OR expression, we may be able to
2396 simplify by reassociating. */
2397 if (innercode == TRUTH_AND_EXPR
2398 || innercode == TRUTH_OR_EXPR
2399 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2400 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
2402 tree inner1 = gimple_assign_rhs1 (stmt);
2403 tree inner2 = gimple_assign_rhs2 (stmt);
2404 gimple s;
2405 tree t;
2406 tree partial = NULL_TREE;
2407 bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
2409 /* Check for boolean identities that don't require recursive examination
2410 of inner1/inner2:
2411 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2412 inner1 OR (inner1 AND inner2) => inner1
2413 !inner1 OR (inner1 OR inner2) => true
2414 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2416 if (inner1 == true_test_var)
2417 return (is_or ? var : inner1);
2418 else if (inner2 == true_test_var)
2419 return (is_or ? var : inner2);
2420 else if (inner1 == false_test_var)
2421 return (is_or
2422 ? boolean_true_node
2423 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2424 else if (inner2 == false_test_var)
2425 return (is_or
2426 ? boolean_true_node
2427 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2429 /* Next, redistribute/reassociate the OR across the inner tests.
2430 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2431 if (TREE_CODE (inner1) == SSA_NAME
2432 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2433 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2434 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2435 gimple_assign_rhs1 (s),
2436 gimple_assign_rhs2 (s),
2437 code2, op2a, op2b)))
2439 /* Handle the OR case, where we are reassociating:
2440 (inner1 OR inner2) OR (op2a code2 op2b)
2441 => (t OR inner2)
2442 If the partial result t is a constant, we win. Otherwise
2443 continue on to try reassociating with the other inner test. */
2444 if (innercode == TRUTH_OR_EXPR)
2446 if (integer_onep (t))
2447 return boolean_true_node;
2448 else if (integer_zerop (t))
2449 return inner2;
2452 /* Handle the AND case, where we are redistributing:
2453 (inner1 AND inner2) OR (op2a code2 op2b)
2454 => (t AND (inner2 OR (op2a code op2b))) */
2455 else
2457 if (integer_zerop (t))
2458 return boolean_false_node;
2459 else
2460 /* Save partial result for later. */
2461 partial = t;
2465 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2466 if (TREE_CODE (inner2) == SSA_NAME
2467 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2468 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2469 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2470 gimple_assign_rhs1 (s),
2471 gimple_assign_rhs2 (s),
2472 code2, op2a, op2b)))
2474 /* Handle the OR case, where we are reassociating:
2475 (inner1 OR inner2) OR (op2a code2 op2b)
2476 => (inner1 OR t) */
2477 if (innercode == TRUTH_OR_EXPR)
2479 if (integer_zerop (t))
2480 return inner1;
2481 else if (integer_onep (t))
2482 return boolean_true_node;
2485 /* Handle the AND case, where we are redistributing:
2486 (inner1 AND inner2) OR (op2a code2 op2b)
2487 => (t AND (inner1 OR (op2a code2 op2b)))
2488 => (t AND partial) */
2489 else
2491 if (integer_zerop (t))
2492 return boolean_false_node;
2493 else if (partial)
2495 /* We already got a simplification for the other
2496 operand to the redistributed AND expression. The
2497 interesting case is when at least one is true.
2498 Or, if both are the same, we can apply the identity
2499 (x AND x) == true. */
2500 if (integer_onep (partial))
2501 return t;
2502 else if (integer_onep (t))
2503 return partial;
2504 else if (same_bool_result_p (t, partial))
2505 return boolean_true_node;
2510 return NULL_TREE;
2513 /* Try to simplify the OR of two comparisons defined by
2514 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2515 If this can be done without constructing an intermediate value,
2516 return the resulting tree; otherwise NULL_TREE is returned.
2517 This function is deliberately asymmetric as it recurses on SSA_DEFs
2518 in the first comparison but not the second. */
2520 static tree
2521 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2522 enum tree_code code2, tree op2a, tree op2b)
2524 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2525 if (operand_equal_p (op1a, op2a, 0)
2526 && operand_equal_p (op1b, op2b, 0))
2528 tree t = combine_comparisons (UNKNOWN_LOCATION,
2529 TRUTH_ORIF_EXPR, code1, code2,
2530 boolean_type_node, op1a, op1b);
2531 if (t)
2532 return t;
2535 /* Likewise the swapped case of the above. */
2536 if (operand_equal_p (op1a, op2b, 0)
2537 && operand_equal_p (op1b, op2a, 0))
2539 tree t = combine_comparisons (UNKNOWN_LOCATION,
2540 TRUTH_ORIF_EXPR, code1,
2541 swap_tree_comparison (code2),
2542 boolean_type_node, op1a, op1b);
2543 if (t)
2544 return t;
2547 /* If both comparisons are of the same value against constants, we might
2548 be able to merge them. */
2549 if (operand_equal_p (op1a, op2a, 0)
2550 && TREE_CODE (op1b) == INTEGER_CST
2551 && TREE_CODE (op2b) == INTEGER_CST)
2553 int cmp = tree_int_cst_compare (op1b, op2b);
2555 /* If we have (op1a != op1b), we should either be able to
2556 return that or TRUE, depending on whether the constant op1b
2557 also satisfies the other comparison against op2b. */
2558 if (code1 == NE_EXPR)
2560 bool done = true;
2561 bool val;
2562 switch (code2)
2564 case EQ_EXPR: val = (cmp == 0); break;
2565 case NE_EXPR: val = (cmp != 0); break;
2566 case LT_EXPR: val = (cmp < 0); break;
2567 case GT_EXPR: val = (cmp > 0); break;
2568 case LE_EXPR: val = (cmp <= 0); break;
2569 case GE_EXPR: val = (cmp >= 0); break;
2570 default: done = false;
2572 if (done)
2574 if (val)
2575 return boolean_true_node;
2576 else
2577 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2580 /* Likewise if the second comparison is a != comparison. */
2581 else if (code2 == NE_EXPR)
2583 bool done = true;
2584 bool val;
2585 switch (code1)
2587 case EQ_EXPR: val = (cmp == 0); break;
2588 case NE_EXPR: val = (cmp != 0); break;
2589 case LT_EXPR: val = (cmp > 0); break;
2590 case GT_EXPR: val = (cmp < 0); break;
2591 case LE_EXPR: val = (cmp >= 0); break;
2592 case GE_EXPR: val = (cmp <= 0); break;
2593 default: done = false;
2595 if (done)
2597 if (val)
2598 return boolean_true_node;
2599 else
2600 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2604 /* See if an equality test is redundant with the other comparison. */
2605 else if (code1 == EQ_EXPR)
2607 bool val;
2608 switch (code2)
2610 case EQ_EXPR: val = (cmp == 0); break;
2611 case NE_EXPR: val = (cmp != 0); break;
2612 case LT_EXPR: val = (cmp < 0); break;
2613 case GT_EXPR: val = (cmp > 0); break;
2614 case LE_EXPR: val = (cmp <= 0); break;
2615 case GE_EXPR: val = (cmp >= 0); break;
2616 default:
2617 val = false;
2619 if (val)
2620 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2622 else if (code2 == EQ_EXPR)
2624 bool val;
2625 switch (code1)
2627 case EQ_EXPR: val = (cmp == 0); break;
2628 case NE_EXPR: val = (cmp != 0); break;
2629 case LT_EXPR: val = (cmp > 0); break;
2630 case GT_EXPR: val = (cmp < 0); break;
2631 case LE_EXPR: val = (cmp >= 0); break;
2632 case GE_EXPR: val = (cmp <= 0); break;
2633 default:
2634 val = false;
2636 if (val)
2637 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2640 /* Chose the less restrictive of two < or <= comparisons. */
2641 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2642 && (code2 == LT_EXPR || code2 == LE_EXPR))
2644 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2645 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2646 else
2647 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2650 /* Likewise chose the less restrictive of two > or >= comparisons. */
2651 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2652 && (code2 == GT_EXPR || code2 == GE_EXPR))
2654 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2655 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2656 else
2657 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2660 /* Check for singleton ranges. */
2661 else if (cmp == 0
2662 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2663 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2664 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2666 /* Check for less/greater pairs that don't restrict the range at all. */
2667 else if (cmp >= 0
2668 && (code1 == LT_EXPR || code1 == LE_EXPR)
2669 && (code2 == GT_EXPR || code2 == GE_EXPR))
2670 return boolean_true_node;
2671 else if (cmp <= 0
2672 && (code1 == GT_EXPR || code1 == GE_EXPR)
2673 && (code2 == LT_EXPR || code2 == LE_EXPR))
2674 return boolean_true_node;
2677 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2678 NAME's definition is a truth value. See if there are any simplifications
2679 that can be done against the NAME's definition. */
2680 if (TREE_CODE (op1a) == SSA_NAME
2681 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2682 && (integer_zerop (op1b) || integer_onep (op1b)))
2684 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2685 || (code1 == NE_EXPR && integer_onep (op1b)));
2686 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2687 switch (gimple_code (stmt))
2689 case GIMPLE_ASSIGN:
2690 /* Try to simplify by copy-propagating the definition. */
2691 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2693 case GIMPLE_PHI:
2694 /* If every argument to the PHI produces the same result when
2695 ORed with the second comparison, we win.
2696 Do not do this unless the type is bool since we need a bool
2697 result here anyway. */
2698 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2700 tree result = NULL_TREE;
2701 unsigned i;
2702 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2704 tree arg = gimple_phi_arg_def (stmt, i);
2706 /* If this PHI has itself as an argument, ignore it.
2707 If all the other args produce the same result,
2708 we're still OK. */
2709 if (arg == gimple_phi_result (stmt))
2710 continue;
2711 else if (TREE_CODE (arg) == INTEGER_CST)
2713 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2715 if (!result)
2716 result = boolean_true_node;
2717 else if (!integer_onep (result))
2718 return NULL_TREE;
2720 else if (!result)
2721 result = fold_build2 (code2, boolean_type_node,
2722 op2a, op2b);
2723 else if (!same_bool_comparison_p (result,
2724 code2, op2a, op2b))
2725 return NULL_TREE;
2727 else if (TREE_CODE (arg) == SSA_NAME)
2729 tree temp = or_var_with_comparison (arg, invert,
2730 code2, op2a, op2b);
2731 if (!temp)
2732 return NULL_TREE;
2733 else if (!result)
2734 result = temp;
2735 else if (!same_bool_result_p (result, temp))
2736 return NULL_TREE;
2738 else
2739 return NULL_TREE;
2741 return result;
2744 default:
2745 break;
2748 return NULL_TREE;
2751 /* Try to simplify the OR of two comparisons, specified by
2752 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2753 If this can be simplified to a single expression (without requiring
2754 introducing more SSA variables to hold intermediate values),
2755 return the resulting tree. Otherwise return NULL_TREE.
2756 If the result expression is non-null, it has boolean type. */
2758 tree
2759 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2760 enum tree_code code2, tree op2a, tree op2b)
2762 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2763 if (t)
2764 return t;
2765 else
2766 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);