gcc/
[official-gcc.git] / gcc / gimple-fold.c
blob3479d07019e902baeff7042b9e709d65ed69b7d9
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
32 #include "target.h"
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.

   SYM is expected to be a VAR_DECL or CONST_DECL.  Only static,
   read-only symbols are considered; everything else cannot have a
   compile-time-known value.  */

tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && (TREE_READONLY (sym)
          || TREE_CODE (sym) == CONST_DECL))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
        {
          STRIP_NOPS (val);
          if (is_gimple_min_invariant (val))
            {
              if (TREE_CODE (val) == ADDR_EXPR)
                {
                  /* Propagating an address makes the pointed-to variable
                     address-taken; record that so later passes see it.  */
                  tree base = get_base_address (TREE_OPERAND (val, 0));
                  if (base && TREE_CODE (base) == VAR_DECL)
                    {
                      TREE_ADDRESSABLE (base) = 1;
                      if (gimple_referenced_vars (cfun))
                        add_referenced_var (base);
                    }
                }
              return val;
            }
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && !DECL_EXTERNAL (sym)
          && targetm.binds_local_p (sym)
          && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
              || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
        return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}
79 /* Return true if we may propagate the address expression ADDR into the
80 dereference DEREF and cancel them. */
82 bool
83 may_propagate_address_into_dereference (tree addr, tree deref)
85 gcc_assert (INDIRECT_REF_P (deref)
86 && TREE_CODE (addr) == ADDR_EXPR);
88 /* Don't propagate if ADDR's operand has incomplete type. */
89 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
90 return false;
92 /* If the address is invariant then we do not need to preserve restrict
93 qualifications. But we do need to preserve volatile qualifiers until
94 we can annotate the folded dereference itself properly. */
95 if (is_gimple_min_invariant (addr)
96 && (!TREE_THIS_VOLATILE (deref)
97 || TYPE_VOLATILE (TREE_TYPE (addr))))
98 return useless_type_conversion_p (TREE_TYPE (deref),
99 TREE_TYPE (TREE_OPERAND (addr, 0)));
101 /* Else both the address substitution and the folding must result in
102 a valid useless type conversion sequence. */
103 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
104 TREE_TYPE (addr))
105 && useless_type_conversion_p (TREE_TYPE (deref),
106 TREE_TYPE (TREE_OPERAND (addr, 0))));
/* A subroutine of fold_stmt.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.

   LOC is the location of the original expression.

   ALLOW_NEGATIVE_IDX controls whether an index below the array's
   lower bound is acceptable (used when folding flexible-array-member
   style accesses).  Returns the new ARRAY_REF, or NULL_TREE when the
   offset is not an exact multiple of the element size or would go out
   of bounds.  */

static tree
maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset,
                                tree orig_type,
                                bool allow_negative_idx)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of elements type) from that ARRAY_REF).
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
          || TREE_CODE (elt_offset) != INTEGER_CST)
        return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!useless_type_conversion_p (orig_type, elt_type))
    return NULL_TREE;

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = ssizetype;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (!elt_size)
    return NULL;
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
        elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
         to sign-extend the (possibly pointer) offset here
         and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
                                 TYPE_PRECISION (TREE_TYPE (offset)));
      /* Reject non-constant element sizes and inexact divisions
         (non-zero remainder in either half of the double_int).  */
      if (TREE_CODE (elt_size) != INTEGER_CST
          || div_and_round_double (TRUNC_DIV_EXPR, 0,
                                   soffset.low, soffset.high,
                                   TREE_INT_CST_LOW (elt_size),
                                   TREE_INT_CST_HIGH (elt_size),
                                   &lquo, &hquo, &lrem, &hrem)
          || lrem || hrem)
        return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
        min_idx = TYPE_MIN_VALUE (idx_type);
      else
        min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
        return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds. For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.  The same is true for
       struct A { long x; char d[0]; } *a;
       (char *)a - 4;
     which should be not folded to &a->d[-8].  */
  if (domain_type
      && TYPE_MAX_VALUE (domain_type)
      && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
    {
      tree up_bound = TYPE_MAX_VALUE (domain_type);

      if (tree_int_cst_lt (up_bound, idx)
          /* Accesses after the end of arrays of size 0 (gcc
             extension) and 1 are likely intentional ("struct
             hack").  */
          && compare_tree_int (up_bound, 1) > 0)
        return NULL_TREE;
    }
  if (domain_type
      && TYPE_MIN_VALUE (domain_type))
    {
      if (!allow_negative_idx
          && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
          && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
        return NULL_TREE;
    }
  else if (!allow_negative_idx
           && compare_tree_int (idx, 0) < 0)
    return NULL_TREE;

  {
    tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
    SET_EXPR_LOCATION (t, loc);
    return t;
  }
}
/* Attempt to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.

   LOC is the location of the original expression.

   Walks the fields of RECORD_TYPE looking for one that contains byte
   OFFSET, recursing into aggregate fields.  Trailing array fields are
   remembered so a flexible-array-member access can still be folded
   when no field matches exactly.  Returns NULL_TREE on failure.  */

static tree
maybe_fold_offset_to_component_ref (location_t loc, tree record_type,
                                    tree base, tree offset, tree orig_type)
{
  tree f, t, field_type, tail_array_field, field_offset;
  tree ret;
  tree new_base;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (useless_type_conversion_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
        continue;
      if (DECL_BIT_FIELD (f))
        continue;

      if (!DECL_FIELD_OFFSET (f))
        continue;
      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
        continue;

      /* ??? Java creates "interesting" fields for representing base classes.
         They have no name, and have no context.  With no context, we get into
         trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
        continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
        continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
         then we can return that field.  */
      if (cmp == 0
          && useless_type_conversion_p (orig_type, field_type))
        {
          t = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
          return t;
        }

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
        continue;

      /* Check for array at the end of the struct.  This is often
         used as for flexible array members.  We should be able to
         turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
        tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
          || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
        continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
        continue;

      /* If we matched, then set offset to the displacement into
         this field.  */
      new_base = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
      SET_EXPR_LOCATION (new_base, loc);

      /* Recurse to possibly find the match.  Negative indices are only
         allowed for the record's first field.  */
      ret = maybe_fold_offset_to_array_ref (loc, new_base, t, orig_type,
                                            f == TYPE_FIELDS (record_type));
      if (ret)
        return ret;
      ret = maybe_fold_offset_to_component_ref (loc, field_type, new_base, t,
                                                orig_type);
      if (ret)
        return ret;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  base = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
  SET_EXPR_LOCATION (base, loc);

  t = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type,
                                      f == TYPE_FIELDS (record_type));
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (loc, field_type, base, offset,
                                             orig_type);
}
/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
   or BASE[index] or by combination of those.

   LOC is the location of original expression.

   Before attempting the conversion strip off existing ADDR_EXPRs and
   handled component refs.  Returns NULL_TREE if BASE is not an
   ADDR_EXPR or no folding applies.  */

tree
maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
                                tree orig_type)
{
  tree ret;
  tree type;

  STRIP_NOPS (base);
  if (TREE_CODE (base) != ADDR_EXPR)
    return NULL_TREE;

  base = TREE_OPERAND (base, 0);

  /* Handle case where existing COMPONENT_REF pick e.g. wrong field of union,
     so it needs to be removed and new COMPONENT_REF constructed.
     The wrong COMPONENT_REF are often constructed by folding the
     (type *)&object within the expression (type *)&object+offset  */
  if (handled_component_p (base))
    {
      HOST_WIDE_INT sub_offset, size, maxsize;
      tree newbase;
      newbase = get_ref_base_and_extent (base, &sub_offset,
                                         &size, &maxsize);
      gcc_assert (newbase);
      /* Only rewrite when the access has a known constant extent and
         starts on a byte boundary.  */
      if (size == maxsize
          && size != -1
          && !(sub_offset & (BITS_PER_UNIT - 1)))
        {
          base = newbase;
          if (sub_offset)
            offset = int_const_binop (PLUS_EXPR, offset,
                                      build_int_cst (TREE_TYPE (offset),
                                                     sub_offset / BITS_PER_UNIT), 1);
        }
    }
  if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
      && integer_zerop (offset))
    return base;
  type = TREE_TYPE (base);

  /* Prefer a component reference; fall back to an array reference.  */
  ret = maybe_fold_offset_to_component_ref (loc, type, base, offset, orig_type);
  if (!ret)
    ret = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type, true);

  return ret;
}
/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
   or &BASE[index] or by combination of those.

   LOC is the location of the original expression.

   Before attempting the conversion strip off existing component refs.
   ADDR and ORIG_TYPE must both be of pointer type.  Returns NULL_TREE
   when folding would change which array object the address refers to
   (which would break __builtin_object_size) or would need a non-useless
   pointer conversion.  */

tree
maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
                              tree orig_type)
{
  tree t;

  gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
              && POINTER_TYPE_P (orig_type));

  t = maybe_fold_offset_to_reference (loc, addr, offset,
                                      TREE_TYPE (orig_type));
  if (t != NULL_TREE)
    {
      tree orig = addr;
      tree ptr_type;

      /* For __builtin_object_size to function correctly we need to
         make sure not to fold address arithmetic so that we change
         reference from one array to another.  This would happen for
         example for

           struct X { char s1[10]; char s2[10] } s;
           char *foo (void) { return &s.s2[-4]; }

         where we need to avoid generating &s.s1[6].  As the C and
         C++ frontends create different initial trees
         (char *) &s.s1 + -4  vs.  &s.s1[-4]  we have to do some
         sophisticated comparisons here.  Note that checking for the
         condition after the fact is easier than trying to avoid doing
         the folding.  */
      STRIP_NOPS (orig);
      if (TREE_CODE (orig) == ADDR_EXPR)
        orig = TREE_OPERAND (orig, 0);
      if ((TREE_CODE (orig) == ARRAY_REF
           || (TREE_CODE (orig) == COMPONENT_REF
               && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
          && (TREE_CODE (t) == ARRAY_REF
              || TREE_CODE (t) == COMPONENT_REF)
          && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
                               ? TREE_OPERAND (orig, 0) : orig,
                               TREE_CODE (t) == ARRAY_REF
                               ? TREE_OPERAND (t, 0) : t, 0))
        return NULL_TREE;

      ptr_type = build_pointer_type (TREE_TYPE (t));
      if (!useless_type_conversion_p (orig_type, ptr_type))
        return NULL_TREE;
      return build_fold_addr_expr_with_type_loc (loc, t, ptr_type);
    }

  return NULL_TREE;
}
/* A subroutine of fold_stmt.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.

   Note: this refolds BASE and writes it back into EXPR's operand 0 as
   a side effect, so EXPR is mutated even when NULL_TREE is returned.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);
  location_t loc = EXPR_LOCATION (expr);

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_TYPE_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a POINTER_PLUS_EXPR.  */
  if (TREE_CODE (base) == POINTER_PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = fold_convert (sizetype,
                             int_const_binop (PLUS_EXPR, offset, offset2, 1));
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      tree base_addr = base;

      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
          && is_gimple_min_invariant (DECL_INITIAL (base)))
        return DECL_INITIAL (base);

      /* If there is no offset involved simply return the folded base.  */
      if (integer_zerop (offset))
        return base;

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_reference (loc, base_addr, offset,
                                          TREE_TYPE (expr));
      if (t)
        {
          /* Preserve volatileness of the original expression.
             We can end up with a plain decl here which is shared
             and we shouldn't mess with its flags.  */
          if (!SSA_VAR_P (t))
            TREE_THIS_VOLATILE (t) = volatile_p;
          return t;
        }
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
         such as "_"[3].  Bail out of the entire substitution search
         and arrange for the entire statement to be replaced by a
         call to __builtin_trap.  In all likelihood this will all be
         constant-folded away, but in the meantime we can't leave with
         something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
        {
          /* FIXME: Except that this causes problems elsewhere with dead
             code not being deleted, and we die in the rtl expanders
             because we failed to remove some ssa_name.  In the meantime,
             just return zero.  */
          /* FIXME2: This condition should be signaled by
             fold_read_from_constant_string directly, rather than
             re-checking for it here.  */
          return integer_zero_node;
        }

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
        {
          t = maybe_fold_offset_to_reference (loc, base, offset,
                                              TREE_TYPE (expr));
          if (t)
            return t;
        }
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}
/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
        (T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
        &array[x]
   which may be able to propagate further.

   Returns the new ADDR_EXPR, or NULL_TREE if no folding applies.  */

tree
maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    {
      /* Or op0 should now be A[0] and the non-constant offset defined
         via a multiplication by the array element size.  */
      if (TREE_CODE (op0) == ARRAY_REF
          && integer_zerop (TREE_OPERAND (op0, 1))
          && TREE_CODE (op1) == SSA_NAME
          && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (op0)), 1))
        {
          gimple offset_def = SSA_NAME_DEF_STMT (op1);
          if (!is_gimple_assign (offset_def))
            return NULL_TREE;

          /* As we will end up creating a variable index array access
             in the outermost array dimension make sure there isn't
             a more inner array that the index could overflow to.  */
          if (TREE_CODE (TREE_OPERAND (op0, 0)) == ARRAY_REF)
            return NULL_TREE;

          /* Do not build array references of something that we can't
             see the true number of array dimensions for.  */
          if (!DECL_P (TREE_OPERAND (op0, 0))
              && !handled_component_p (TREE_OPERAND (op0, 0)))
            return NULL_TREE;

          /* Offset is INDEX * elt_size: fold to A[INDEX].  */
          if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
              && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
              && tree_int_cst_equal (gimple_assign_rhs2 (offset_def),
                                     TYPE_SIZE_UNIT (TREE_TYPE (op0))))
            return build_fold_addr_expr
                          (build4 (ARRAY_REF, TREE_TYPE (op0),
                                   TREE_OPERAND (op0, 0),
                                   gimple_assign_rhs1 (offset_def),
                                   TREE_OPERAND (op0, 2),
                                   TREE_OPERAND (op0, 3)));
          /* For one-byte elements the offset is the index itself.  */
          else if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (op0)))
                   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
            return build_fold_addr_expr
                          (build4 (ARRAY_REF, TREE_TYPE (op0),
                                   TREE_OPERAND (op0, 0),
                                   op1,
                                   TREE_OPERAND (op0, 2),
                                   TREE_OPERAND (op0, 3)));
        }
      return NULL_TREE;
    }

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
        break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
        break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
        {
          min_idx = TYPE_MIN_VALUE (min_idx);
          if (min_idx)
            {
              if (TREE_CODE (min_idx) != INTEGER_CST)
                break;

              array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
              if (!integer_zerop (min_idx))
                array_idx = int_const_binop (MINUS_EXPR, array_idx,
                                             min_idx, 0);
            }
        }

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
                             array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (loc, op0, op1, ptd_type, true);
  if (!t)
    t = maybe_fold_offset_to_component_ref (loc, TREE_TYPE (op0), op0, op1,
                                            ptd_type);
  if (t)
    {
      t = build1 (ADDR_EXPR, res_type, t);
      SET_EXPR_LOCATION (t, loc);
    }

  return t;
}
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.

   Returns the folded replacement for EXPR, or NULL_TREE if nothing
   changed.  T walks down through the handled components of EXPR so
   that the innermost base can be rewritten in place before refolding
   the whole expression recursively.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;

  if (TREE_CODE (expr) == ARRAY_REF
      && !is_lhs)
    {
      tree tem = fold_read_from_constant_string (expr);
      if (tem)
        return tem;
    }

  /* ??? We might want to open-code the relevant remaining cases
     to avoid using the generic fold.  */
  if (handled_component_p (*t)
      && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
    {
      tree tem = fold (*t);
      if (tem != *t)
        return tem;
    }

  /* Walk to the innermost base of the reference.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  if (TREE_CODE (*t) == INDIRECT_REF)
    {
      tree tem = maybe_fold_stmt_indirect (*t, TREE_OPERAND (*t, 0),
                                           integer_zero_node);
      /* Avoid folding *"abc" = 5 into 'a' = 5.  */
      if (is_lhs && tem && CONSTANT_CLASS_P (tem))
        tem = NULL_TREE;
      if (!tem
          && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR)
        /* If we had a good reason for propagating the address here,
           make sure we end up with valid gimple.  See PR34989.  */
        tem = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);

      if (tem)
        {
          /* Splice the folded base back in and retry on the whole
             expression; return EXPR itself if no further folding.  */
          *t = tem;
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }
  else if (!is_lhs
           && DECL_P (*t))
    {
      tree tem = get_symbol_constant_value (*t);
      if (tem
          && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
        {
          *t = unshare_expr (tem);
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }

  return NULL_TREE;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.

   Dispatches on the RHS class: single-operand RHSs (COND_EXPR,
   TARGET_MEM_REF, references, ADDR_EXPR, vector CONSTRUCTOR, decls),
   unary RHSs (conversions), and binary RHSs (notably
   POINTER_PLUS_EXPR).  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        /* Try to fold a conditional expression.  */
        if (TREE_CODE (rhs) == COND_EXPR)
          {
            tree op0 = COND_EXPR_COND (rhs);
            tree tem;
            bool set = false;
            location_t cond_loc = EXPR_LOCATION (rhs);

            if (COMPARISON_CLASS_P (op0))
              {
                /* Defer overflow warnings so they are only emitted if
                   the folded form is actually used.  */
                fold_defer_overflow_warnings ();
                tem = fold_binary_loc (cond_loc,
                                       TREE_CODE (op0), TREE_TYPE (op0),
                                       TREE_OPERAND (op0, 0),
                                       TREE_OPERAND (op0, 1));
                /* This is actually a conditional expression, not a GIMPLE
                   conditional statement, however, the valid_gimple_rhs_p
                   test still applies.  */
                set = (tem && is_gimple_condexpr (tem)
                       && valid_gimple_rhs_p (tem));
                fold_undefer_overflow_warnings (set, stmt, 0);
              }
            else if (is_gimple_min_invariant (op0))
              {
                tem = op0;
                set = true;
              }
            else
              return NULL_TREE;

            if (set)
              result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
                                        COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
          }

        else if (TREE_CODE (rhs) == TARGET_MEM_REF)
          return maybe_fold_tmr (rhs);

        else if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree tem = maybe_fold_reference (TREE_OPERAND (rhs, 0), true);
            if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                 && (CONSTRUCTOR_NELTS (rhs)
                     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (TREE_CODE (val) != INTEGER_CST
                  && TREE_CODE (val) != REAL_CST
                  && TREE_CODE (val) != FIXED_CST)
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return unshare_expr (get_symbol_constant_value (rhs));

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;

        return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
        if (result)
          {
            /* If the operation was a conversion do _not_ mark a
               resulting constant with TREE_OVERFLOW if the original
               constant was not.  These conversions have implementation
               defined behavior and retaining the TREE_OVERFLOW flag
               here would confuse later passes such as VRP.  */
            if (CONVERT_EXPR_CODE_P (subcode)
                && TREE_CODE (result) == INTEGER_CST
                && TREE_CODE (rhs) == INTEGER_CST)
              TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

            STRIP_USELESS_TYPE_CONVERSION (result);
            if (valid_gimple_rhs_p (result))
              return result;
          }
        else if (CONVERT_EXPR_CODE_P (subcode)
                 && POINTER_TYPE_P (gimple_expr_type (stmt))
                 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
          {
            tree type = gimple_expr_type (stmt);
            tree t = maybe_fold_offset_to_address (loc,
                                                   gimple_assign_rhs1 (stmt),
                                                   integer_zero_node, type);
            if (t)
              return t;
          }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
        {
          tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
            {
              type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
              if (!useless_type_conversion_p
                    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
                type = TREE_TYPE (gimple_assign_rhs1 (stmt));
            }
          result = maybe_fold_stmt_addition (gimple_location (stmt),
                                             type,
                                             gimple_assign_rhs1 (stmt),
                                             gimple_assign_rhs2 (stmt));
        }

      if (!result)
        result = fold_binary_loc (loc, subcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;

          /* Fold might have produced non-GIMPLE, so if we trust it blindly
             we lose canonicalization opportunities.  Do not go again
             through fold here though, or the same non-GIMPLE will be
             produced.  */
          if (commutative_tree_code (subcode)
              && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                       gimple_assign_rhs2 (stmt), false))
            return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
                           gimple_assign_rhs2 (stmt),
                           gimple_assign_rhs1 (stmt));
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
996 /* Attempt to fold a conditional statement. Return true if any changes were
997 made. We only attempt to fold the condition expression, and do not perform
998 any transformation that would require alteration of the cfg. It is
999 assumed that the operands have been previously folded. */
1001 static bool
1002 fold_gimple_cond (gimple stmt)
1004 tree result = fold_binary_loc (gimple_location (stmt),
1005 gimple_cond_code (stmt),
1006 boolean_type_node,
1007 gimple_cond_lhs (stmt),
1008 gimple_cond_rhs (stmt));
1010 if (result)
1012 STRIP_USELESS_TYPE_CONVERSION (result);
1013 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
1015 gimple_cond_set_condition_from_tree (stmt, result);
1016 return true;
1020 return false;
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produces a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE; /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc();
  struct gimplify_ctx gctx;
  gimple last = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);

  push_gimplify_context (&gctx);

  /* Gimplify EXPR into STMTS; when the call's result is used, capture
     it in a temporary so we can assign it to LHS afterwards.  */
  if (lhs == NULL_TREE)
    gimplify_and_add (expr, &stmts);
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  /* Insert all but the final gimplified statement before *SI_P; the
     last one is kept back so it can become the replacement for the
     call (or precede the LHS assignment).  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      if (last)
        {
          gsi_insert_before (si_p, last, GSI_NEW_STMT);
          gsi_next (si_p);
        }
      new_stmt = gsi_stmt (i);
      find_new_referenced_vars (new_stmt);
      mark_symbols_for_renaming (new_stmt);
      last = new_stmt;
    }

  if (lhs == NULL_TREE)
    {
      /* Result ignored: drop the call's virtual and SSA defs and let
         the last gimplified statement replace it.  */
      unlink_stmt_vdef (stmt);
      release_defs (stmt);
      new_stmt = last;
    }
  else
    {
      if (last)
        {
          gsi_insert_before (si_p, last, GSI_NEW_STMT);
          gsi_next (si_p);
        }
      /* Replace the call with LHS = TMP, transferring the call's
         virtual operands and SSA definitions.  */
      new_stmt = gimple_build_assign (lhs, tmp);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
1097 /* Return the string length, maximum string length or maximum value of
1098 ARG in LENGTH.
1099 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1100 is not NULL and, for TYPE == 0, its value is not equal to the length
1101 we determine or if we are unable to determine the length or value,
1102 return false. VISITED is a bitmap of visited variables.
1103 TYPE is 0 if string length should be returned, 1 for maximum string
1104 length and 2 for maximum value ARG can have. */
1106 static bool
1107 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
1109 tree var, val;
1110 gimple def_stmt;
1112 if (TREE_CODE (arg) != SSA_NAME)
1114 if (TREE_CODE (arg) == COND_EXPR)
1115 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
1116 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
1117 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1118 else if (TREE_CODE (arg) == ADDR_EXPR
1119 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1120 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1122 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1123 if (TREE_CODE (aop0) == INDIRECT_REF
1124 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1125 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1126 length, visited, type);
1129 if (type == 2)
1131 val = arg;
1132 if (TREE_CODE (val) != INTEGER_CST
1133 || tree_int_cst_sgn (val) < 0)
1134 return false;
1136 else
1137 val = c_strlen (arg, 1);
1138 if (!val)
1139 return false;
1141 if (*length)
1143 if (type > 0)
1145 if (TREE_CODE (*length) != INTEGER_CST
1146 || TREE_CODE (val) != INTEGER_CST)
1147 return false;
1149 if (tree_int_cst_lt (*length, val))
1150 *length = val;
1151 return true;
1153 else if (simple_cst_equal (val, *length) != 1)
1154 return false;
1157 *length = val;
1158 return true;
1161 /* If we were already here, break the infinite cycle. */
1162 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
1163 return true;
1164 bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
1166 var = arg;
1167 def_stmt = SSA_NAME_DEF_STMT (var);
1169 switch (gimple_code (def_stmt))
1171 case GIMPLE_ASSIGN:
1172 /* The RHS of the statement defining VAR must either have a
1173 constant length or come from another SSA_NAME with a constant
1174 length. */
1175 if (gimple_assign_single_p (def_stmt)
1176 || gimple_assign_unary_nop_p (def_stmt))
1178 tree rhs = gimple_assign_rhs1 (def_stmt);
1179 return get_maxval_strlen (rhs, length, visited, type);
1181 return false;
1183 case GIMPLE_PHI:
1185 /* All the arguments of the PHI node must have the same constant
1186 length. */
1187 unsigned i;
1189 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1191 tree arg = gimple_phi_arg (def_stmt, i)->def;
1193 /* If this PHI has itself as an argument, we cannot
1194 determine the string length of this argument. However,
1195 if we can find a constant string length for the other
1196 PHI args then we can still be sure that this is a
1197 constant string length. So be optimistic and just
1198 continue with the next argument. */
1199 if (arg == gimple_phi_result (def_stmt))
1200 continue;
1202 if (!get_maxval_strlen (arg, length, visited, type))
1203 return false;
1206 return true;
1208 default:
1209 return false;
1214 /* Fold builtin call in statement STMT. Returns a simplified tree.
1215 We may return a non-constant expression, including another call
1216 to a different function and with different arguments, e.g.,
1217 substituting memcpy for strcpy when the string length is known.
1218 Note that some builtins expand into inline code that may not
1219 be valid in GIMPLE. Callers must take care. */
1221 tree
1222 gimple_fold_builtin (gimple stmt)
1224 tree result, val[3];
1225 tree callee, a;
1226 int arg_idx, type;
1227 bitmap visited;
1228 bool ignore;
1229 int nargs;
1230 location_t loc = gimple_location (stmt);
1232 gcc_assert (is_gimple_call (stmt));
1234 ignore = (gimple_call_lhs (stmt) == NULL);
1236 /* First try the generic builtin folder. If that succeeds, return the
1237 result directly. */
1238 result = fold_call_stmt (stmt, ignore);
1239 if (result)
1241 if (ignore)
1242 STRIP_NOPS (result);
1243 return result;
1246 /* Ignore MD builtins. */
1247 callee = gimple_call_fndecl (stmt);
1248 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
1249 return NULL_TREE;
1251 /* If the builtin could not be folded, and it has no argument list,
1252 we're done. */
1253 nargs = gimple_call_num_args (stmt);
1254 if (nargs == 0)
1255 return NULL_TREE;
1257 /* Limit the work only for builtins we know how to simplify. */
1258 switch (DECL_FUNCTION_CODE (callee))
1260 case BUILT_IN_STRLEN:
1261 case BUILT_IN_FPUTS:
1262 case BUILT_IN_FPUTS_UNLOCKED:
1263 arg_idx = 0;
1264 type = 0;
1265 break;
1266 case BUILT_IN_STRCPY:
1267 case BUILT_IN_STRNCPY:
1268 arg_idx = 1;
1269 type = 0;
1270 break;
1271 case BUILT_IN_MEMCPY_CHK:
1272 case BUILT_IN_MEMPCPY_CHK:
1273 case BUILT_IN_MEMMOVE_CHK:
1274 case BUILT_IN_MEMSET_CHK:
1275 case BUILT_IN_STRNCPY_CHK:
1276 arg_idx = 2;
1277 type = 2;
1278 break;
1279 case BUILT_IN_STRCPY_CHK:
1280 case BUILT_IN_STPCPY_CHK:
1281 arg_idx = 1;
1282 type = 1;
1283 break;
1284 case BUILT_IN_SNPRINTF_CHK:
1285 case BUILT_IN_VSNPRINTF_CHK:
1286 arg_idx = 1;
1287 type = 2;
1288 break;
1289 default:
1290 return NULL_TREE;
1293 if (arg_idx >= nargs)
1294 return NULL_TREE;
1296 /* Try to use the dataflow information gathered by the CCP process. */
1297 visited = BITMAP_ALLOC (NULL);
1298 bitmap_clear (visited);
1300 memset (val, 0, sizeof (val));
1301 a = gimple_call_arg (stmt, arg_idx);
1302 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
1303 val[arg_idx] = NULL_TREE;
1305 BITMAP_FREE (visited);
1307 result = NULL_TREE;
1308 switch (DECL_FUNCTION_CODE (callee))
1310 case BUILT_IN_STRLEN:
1311 if (val[0] && nargs == 1)
1313 tree new_val =
1314 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
1316 /* If the result is not a valid gimple value, or not a cast
1317 of a valid gimple value, then we can not use the result. */
1318 if (is_gimple_val (new_val)
1319 || (is_gimple_cast (new_val)
1320 && is_gimple_val (TREE_OPERAND (new_val, 0))))
1321 return new_val;
1323 break;
1325 case BUILT_IN_STRCPY:
1326 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
1327 result = fold_builtin_strcpy (loc, callee,
1328 gimple_call_arg (stmt, 0),
1329 gimple_call_arg (stmt, 1),
1330 val[1]);
1331 break;
1333 case BUILT_IN_STRNCPY:
1334 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1335 result = fold_builtin_strncpy (loc, callee,
1336 gimple_call_arg (stmt, 0),
1337 gimple_call_arg (stmt, 1),
1338 gimple_call_arg (stmt, 2),
1339 val[1]);
1340 break;
1342 case BUILT_IN_FPUTS:
1343 if (nargs == 2)
1344 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1345 gimple_call_arg (stmt, 1),
1346 ignore, false, val[0]);
1347 break;
1349 case BUILT_IN_FPUTS_UNLOCKED:
1350 if (nargs == 2)
1351 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1352 gimple_call_arg (stmt, 1),
1353 ignore, true, val[0]);
1354 break;
1356 case BUILT_IN_MEMCPY_CHK:
1357 case BUILT_IN_MEMPCPY_CHK:
1358 case BUILT_IN_MEMMOVE_CHK:
1359 case BUILT_IN_MEMSET_CHK:
1360 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1361 result = fold_builtin_memory_chk (loc, callee,
1362 gimple_call_arg (stmt, 0),
1363 gimple_call_arg (stmt, 1),
1364 gimple_call_arg (stmt, 2),
1365 gimple_call_arg (stmt, 3),
1366 val[2], ignore,
1367 DECL_FUNCTION_CODE (callee));
1368 break;
1370 case BUILT_IN_STRCPY_CHK:
1371 case BUILT_IN_STPCPY_CHK:
1372 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1373 result = fold_builtin_stxcpy_chk (loc, callee,
1374 gimple_call_arg (stmt, 0),
1375 gimple_call_arg (stmt, 1),
1376 gimple_call_arg (stmt, 2),
1377 val[1], ignore,
1378 DECL_FUNCTION_CODE (callee));
1379 break;
1381 case BUILT_IN_STRNCPY_CHK:
1382 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1383 result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
1384 gimple_call_arg (stmt, 1),
1385 gimple_call_arg (stmt, 2),
1386 gimple_call_arg (stmt, 3),
1387 val[2]);
1388 break;
1390 case BUILT_IN_SNPRINTF_CHK:
1391 case BUILT_IN_VSNPRINTF_CHK:
1392 if (val[1] && is_gimple_val (val[1]))
1393 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1394 DECL_FUNCTION_CODE (callee));
1395 break;
1397 default:
1398 gcc_unreachable ();
1401 if (result && ignore)
1402 result = fold_ignored_result (result);
1403 return result;
1406 /* Search for a base binfo of BINFO that corresponds to TYPE and return it if
1407 it is found or NULL_TREE if it is not. */
1409 static tree
1410 get_base_binfo_for_type (tree binfo, tree type)
1412 int i;
1413 tree base_binfo;
1414 tree res = NULL_TREE;
1416 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1417 if (TREE_TYPE (base_binfo) == type)
1419 gcc_assert (!res);
1420 res = base_binfo;
1423 return res;
1426 /* Return a binfo describing the part of object referenced by expression REF.
1427 Return NULL_TREE if it cannot be determined. REF can consist of a series of
1428 COMPONENT_REFs of a declaration or of an INDIRECT_REF or it can also be just
1429 a simple declaration, indirect reference or an SSA_NAME. If the function
1430 discovers an INDIRECT_REF or an SSA_NAME, it will assume that the
1431 encapsulating type is described by KNOWN_BINFO, if it is not NULL_TREE.
1432 Otherwise the first non-artificial field declaration or the base declaration
1433 will be examined to get the encapsulating type. */
1435 tree
1436 gimple_get_relevant_ref_binfo (tree ref, tree known_binfo)
1438 while (true)
1440 if (TREE_CODE (ref) == COMPONENT_REF)
1442 tree par_type;
1443 tree binfo, base_binfo;
1444 tree field = TREE_OPERAND (ref, 1);
1446 if (!DECL_ARTIFICIAL (field))
1448 tree type = TREE_TYPE (field);
1449 if (TREE_CODE (type) == RECORD_TYPE)
1450 return TYPE_BINFO (type);
1451 else
1452 return NULL_TREE;
1455 par_type = TREE_TYPE (TREE_OPERAND (ref, 0));
1456 binfo = TYPE_BINFO (par_type);
1457 if (!binfo
1458 || BINFO_N_BASE_BINFOS (binfo) == 0)
1459 return NULL_TREE;
1461 base_binfo = BINFO_BASE_BINFO (binfo, 0);
1462 if (BINFO_TYPE (base_binfo) != TREE_TYPE (field))
1464 tree d_binfo;
1466 d_binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (ref, 0),
1467 known_binfo);
1468 /* Get descendant binfo. */
1469 if (!d_binfo)
1470 return NULL_TREE;
1471 return get_base_binfo_for_type (d_binfo, TREE_TYPE (field));
1474 ref = TREE_OPERAND (ref, 0);
1476 else if (DECL_P (ref) && TREE_CODE (TREE_TYPE (ref)) == RECORD_TYPE)
1477 return TYPE_BINFO (TREE_TYPE (ref));
1478 else if (known_binfo
1479 && (TREE_CODE (ref) == SSA_NAME
1480 || TREE_CODE (ref) == INDIRECT_REF))
1481 return known_binfo;
1482 else
1483 return NULL_TREE;
1487 /* Fold a OBJ_TYPE_REF expression to the address of a function. TOKEN is
1488 integer form of OBJ_TYPE_REF_TOKEN of the reference expression. KNOWN_BINFO
1489 carries the binfo describing the true type of OBJ_TYPE_REF_OBJECT(REF). */
1491 tree
1492 gimple_fold_obj_type_ref_known_binfo (HOST_WIDE_INT token, tree known_binfo)
1494 HOST_WIDE_INT i;
1495 tree v, fndecl;
1497 v = BINFO_VIRTUALS (known_binfo);
1498 i = 0;
1499 while (i != token)
1501 i += (TARGET_VTABLE_USES_DESCRIPTORS
1502 ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
1503 v = TREE_CHAIN (v);
1506 fndecl = TREE_VALUE (v);
1507 return build_fold_addr_expr (fndecl);
1511 /* Fold a OBJ_TYPE_REF expression to the address of a function. If KNOWN_TYPE
1512 is not NULL_TREE, it is the true type of the outmost encapsulating object if
1513 that comes from a pointer SSA_NAME. If the true outmost encapsulating type
1514 can be determined from a declaration OBJ_TYPE_REF_OBJECT(REF), it is used
1515 regardless of KNOWN_TYPE (which thus can be NULL_TREE). */
1517 tree
1518 gimple_fold_obj_type_ref (tree ref, tree known_type)
1520 tree obj = OBJ_TYPE_REF_OBJECT (ref);
1521 tree known_binfo = known_type ? TYPE_BINFO (known_type) : NULL_TREE;
1522 tree binfo;
1524 if (TREE_CODE (obj) == ADDR_EXPR)
1525 obj = TREE_OPERAND (obj, 0);
1527 binfo = gimple_get_relevant_ref_binfo (obj, known_binfo);
1528 if (binfo)
1530 HOST_WIDE_INT token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
1531 return gimple_fold_obj_type_ref_known_binfo (token, binfo);
1533 else
1534 return NULL_TREE;
1537 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1538 The statement may be replaced by another statement, e.g., if the call
1539 simplifies to a constant value. Return true if any changes were made.
1540 It is assumed that the operands have been previously folded. */
1542 static bool
1543 fold_gimple_call (gimple_stmt_iterator *gsi)
1545 gimple stmt = gsi_stmt (*gsi);
1547 tree callee = gimple_call_fndecl (stmt);
1549 /* Check for builtins that CCP can handle using information not
1550 available in the generic fold routines. */
1551 if (callee && DECL_BUILT_IN (callee))
1553 tree result = gimple_fold_builtin (stmt);
1555 if (result)
1557 if (!update_call_from_tree (gsi, result))
1558 gimplify_and_update_call_from_tree (gsi, result);
1559 return true;
1562 else
1564 /* ??? Should perhaps do this in fold proper. However, doing it
1565 there requires that we create a new CALL_EXPR, and that requires
1566 copying EH region info to the new node. Easier to just do it
1567 here where we can just smash the call operand. */
1568 /* ??? Is there a good reason not to do this in fold_stmt_inplace? */
1569 callee = gimple_call_fn (stmt);
1570 if (TREE_CODE (callee) == OBJ_TYPE_REF
1571 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR)
1573 tree t;
1575 t = gimple_fold_obj_type_ref (callee, NULL_TREE);
1576 if (t)
1578 gimple_call_set_fn (stmt, t);
1579 return true;
1584 return false;
1587 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1588 distinguishes both cases. */
1590 static bool
1591 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1593 bool changed = false;
1594 gimple stmt = gsi_stmt (*gsi);
1595 unsigned i;
1597 /* Fold the main computation performed by the statement. */
1598 switch (gimple_code (stmt))
1600 case GIMPLE_ASSIGN:
1602 unsigned old_num_ops = gimple_num_ops (stmt);
1603 tree new_rhs = fold_gimple_assign (gsi);
1604 tree lhs = gimple_assign_lhs (stmt);
1605 if (new_rhs
1606 && !useless_type_conversion_p (TREE_TYPE (lhs),
1607 TREE_TYPE (new_rhs)))
1608 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1609 if (new_rhs
1610 && (!inplace
1611 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1613 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1614 changed = true;
1616 break;
1619 case GIMPLE_COND:
1620 changed |= fold_gimple_cond (stmt);
1621 break;
1623 case GIMPLE_CALL:
1624 /* Fold *& in call arguments. */
1625 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1626 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1628 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1629 if (tmp)
1631 gimple_call_set_arg (stmt, i, tmp);
1632 changed = true;
1635 /* The entire statement may be replaced in this case. */
1636 if (!inplace)
1637 changed |= fold_gimple_call (gsi);
1638 break;
1640 case GIMPLE_ASM:
1641 /* Fold *& in asm operands. */
1642 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1644 tree link = gimple_asm_output_op (stmt, i);
1645 tree op = TREE_VALUE (link);
1646 if (REFERENCE_CLASS_P (op)
1647 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1649 TREE_VALUE (link) = op;
1650 changed = true;
1653 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1655 tree link = gimple_asm_input_op (stmt, i);
1656 tree op = TREE_VALUE (link);
1657 if (REFERENCE_CLASS_P (op)
1658 && (op = maybe_fold_reference (op, false)) != NULL_TREE)
1660 TREE_VALUE (link) = op;
1661 changed = true;
1664 break;
1666 default:;
1669 stmt = gsi_stmt (*gsi);
1671 /* Fold *& on the lhs. */
1672 if (gimple_has_lhs (stmt))
1674 tree lhs = gimple_get_lhs (stmt);
1675 if (lhs && REFERENCE_CLASS_P (lhs))
1677 tree new_lhs = maybe_fold_reference (lhs, true);
1678 if (new_lhs)
1680 gimple_set_lhs (stmt, new_lhs);
1681 changed = true;
1686 return changed;
1689 /* Fold the statement pointed to by GSI. In some cases, this function may
1690 replace the whole statement with a new one. Returns true iff folding
1691 makes any changes.
1692 The statement pointed to by GSI should be in valid gimple form but may
1693 be in unfolded state as resulting from for example constant propagation
1694 which can produce *&x = 0. */
1696 bool
1697 fold_stmt (gimple_stmt_iterator *gsi)
1699 return fold_stmt_1 (gsi, false);
1702 /* Perform the minimal folding on statement STMT. Only operations like
1703 *&x created by constant propagation are handled. The statement cannot
1704 be replaced with a new one. Return true if the statement was
1705 changed, false otherwise.
1706 The statement STMT should be in valid gimple form but may
1707 be in unfolded state as resulting from for example constant propagation
1708 which can produce *&x = 0. */
1710 bool
1711 fold_stmt_inplace (gimple stmt)
1713 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1714 bool changed = fold_stmt_1 (&gsi, true);
1715 gcc_assert (gsi_stmt (gsi) == stmt);
1716 return changed;
1719 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1720 if EXPR is null or we don't know how.
1721 If non-null, the result always has boolean type. */
1723 static tree
1724 canonicalize_bool (tree expr, bool invert)
1726 if (!expr)
1727 return NULL_TREE;
1728 else if (invert)
1730 if (integer_nonzerop (expr))
1731 return boolean_false_node;
1732 else if (integer_zerop (expr))
1733 return boolean_true_node;
1734 else if (TREE_CODE (expr) == SSA_NAME)
1735 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1736 build_int_cst (TREE_TYPE (expr), 0));
1737 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1738 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1739 boolean_type_node,
1740 TREE_OPERAND (expr, 0),
1741 TREE_OPERAND (expr, 1));
1742 else
1743 return NULL_TREE;
1745 else
1747 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1748 return expr;
1749 if (integer_nonzerop (expr))
1750 return boolean_true_node;
1751 else if (integer_zerop (expr))
1752 return boolean_false_node;
1753 else if (TREE_CODE (expr) == SSA_NAME)
1754 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1755 build_int_cst (TREE_TYPE (expr), 0));
1756 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1757 return fold_build2 (TREE_CODE (expr),
1758 boolean_type_node,
1759 TREE_OPERAND (expr, 0),
1760 TREE_OPERAND (expr, 1));
1761 else
1762 return NULL_TREE;
1766 /* Check to see if a boolean expression EXPR is logically equivalent to the
1767 comparison (OP1 CODE OP2). Check for various identities involving
1768 SSA_NAMEs. */
1770 static bool
1771 same_bool_comparison_p (const_tree expr, enum tree_code code,
1772 const_tree op1, const_tree op2)
1774 gimple s;
1776 /* The obvious case. */
1777 if (TREE_CODE (expr) == code
1778 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1779 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1780 return true;
1782 /* Check for comparing (name, name != 0) and the case where expr
1783 is an SSA_NAME with a definition matching the comparison. */
1784 if (TREE_CODE (expr) == SSA_NAME
1785 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1787 if (operand_equal_p (expr, op1, 0))
1788 return ((code == NE_EXPR && integer_zerop (op2))
1789 || (code == EQ_EXPR && integer_nonzerop (op2)));
1790 s = SSA_NAME_DEF_STMT (expr);
1791 if (is_gimple_assign (s)
1792 && gimple_assign_rhs_code (s) == code
1793 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1794 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1795 return true;
1798 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1799 of name is a comparison, recurse. */
1800 if (TREE_CODE (op1) == SSA_NAME
1801 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1803 s = SSA_NAME_DEF_STMT (op1);
1804 if (is_gimple_assign (s)
1805 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1807 enum tree_code c = gimple_assign_rhs_code (s);
1808 if ((c == NE_EXPR && integer_zerop (op2))
1809 || (c == EQ_EXPR && integer_nonzerop (op2)))
1810 return same_bool_comparison_p (expr, c,
1811 gimple_assign_rhs1 (s),
1812 gimple_assign_rhs2 (s));
1813 if ((c == EQ_EXPR && integer_zerop (op2))
1814 || (c == NE_EXPR && integer_nonzerop (op2)))
1815 return same_bool_comparison_p (expr,
1816 invert_tree_comparison (c, false),
1817 gimple_assign_rhs1 (s),
1818 gimple_assign_rhs2 (s));
1821 return false;
1824 /* Check to see if two boolean expressions OP1 and OP2 are logically
1825 equivalent. */
1827 static bool
1828 same_bool_result_p (const_tree op1, const_tree op2)
1830 /* Simple cases first. */
1831 if (operand_equal_p (op1, op2, 0))
1832 return true;
1834 /* Check the cases where at least one of the operands is a comparison.
1835 These are a bit smarter than operand_equal_p in that they apply some
1836 identifies on SSA_NAMEs. */
1837 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1838 && same_bool_comparison_p (op1, TREE_CODE (op2),
1839 TREE_OPERAND (op2, 0),
1840 TREE_OPERAND (op2, 1)))
1841 return true;
1842 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1843 && same_bool_comparison_p (op2, TREE_CODE (op1),
1844 TREE_OPERAND (op1, 0),
1845 TREE_OPERAND (op1, 1)))
1846 return true;
1848 /* Default case. */
1849 return false;
1852 /* Forward declarations for some mutually recursive functions. */
1854 static tree
1855 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1856 enum tree_code code2, tree op2a, tree op2b);
1857 static tree
1858 and_var_with_comparison (tree var, bool invert,
1859 enum tree_code code2, tree op2a, tree op2b);
1860 static tree
1861 and_var_with_comparison_1 (gimple stmt,
1862 enum tree_code code2, tree op2a, tree op2b);
1863 static tree
1864 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1865 enum tree_code code2, tree op2a, tree op2b);
1866 static tree
1867 or_var_with_comparison (tree var, bool invert,
1868 enum tree_code code2, tree op2a, tree op2b);
1869 static tree
1870 or_var_with_comparison_1 (gimple stmt,
1871 enum tree_code code2, tree op2a, tree op2b);
1873 /* Helper function for and_comparisons_1: try to simplify the AND of the
1874 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1875 If INVERT is true, invert the value of the VAR before doing the AND.
1876 Return NULL_EXPR if we can't simplify this to a single expression. */
1878 static tree
1879 and_var_with_comparison (tree var, bool invert,
1880 enum tree_code code2, tree op2a, tree op2b)
1882 tree t;
1883 gimple stmt = SSA_NAME_DEF_STMT (var);
1885 /* We can only deal with variables whose definitions are assignments. */
1886 if (!is_gimple_assign (stmt))
1887 return NULL_TREE;
1889 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1890 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1891 Then we only have to consider the simpler non-inverted cases. */
1892 if (invert)
1893 t = or_var_with_comparison_1 (stmt,
1894 invert_tree_comparison (code2, false),
1895 op2a, op2b);
1896 else
1897 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1898 return canonicalize_bool (t, invert);
1901 /* Try to simplify the AND of the ssa variable defined by the assignment
1902 STMT with the comparison specified by (OP2A CODE2 OP2B).
1903 Return NULL_EXPR if we can't simplify this to a single expression. */
1905 static tree
1906 and_var_with_comparison_1 (gimple stmt,
1907 enum tree_code code2, tree op2a, tree op2b)
1909 tree var = gimple_assign_lhs (stmt);
1910 tree true_test_var = NULL_TREE;
1911 tree false_test_var = NULL_TREE;
1912 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1914 /* Check for identities like (var AND (var == 0)) => false. */
1915 if (TREE_CODE (op2a) == SSA_NAME
1916 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1918 if ((code2 == NE_EXPR && integer_zerop (op2b))
1919 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1921 true_test_var = op2a;
1922 if (var == true_test_var)
1923 return var;
1925 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1926 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1928 false_test_var = op2a;
1929 if (var == false_test_var)
1930 return boolean_false_node;
1934 /* If the definition is a comparison, recurse on it. */
1935 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1937 tree t = and_comparisons_1 (innercode,
1938 gimple_assign_rhs1 (stmt),
1939 gimple_assign_rhs2 (stmt),
1940 code2,
1941 op2a,
1942 op2b);
1943 if (t)
1944 return t;
1947 /* If the definition is an AND or OR expression, we may be able to
1948 simplify by reassociating. */
1949 if (innercode == TRUTH_AND_EXPR
1950 || innercode == TRUTH_OR_EXPR
1951 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1952 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
1954 tree inner1 = gimple_assign_rhs1 (stmt);
1955 tree inner2 = gimple_assign_rhs2 (stmt);
1956 gimple s;
1957 tree t;
1958 tree partial = NULL_TREE;
1959 bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);
1961 /* Check for boolean identities that don't require recursive examination
1962 of inner1/inner2:
1963 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1964 inner1 AND (inner1 OR inner2) => inner1
1965 !inner1 AND (inner1 AND inner2) => false
1966 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1967 Likewise for similar cases involving inner2. */
1968 if (inner1 == true_test_var)
1969 return (is_and ? var : inner1);
1970 else if (inner2 == true_test_var)
1971 return (is_and ? var : inner2);
1972 else if (inner1 == false_test_var)
1973 return (is_and
1974 ? boolean_false_node
1975 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1976 else if (inner2 == false_test_var)
1977 return (is_and
1978 ? boolean_false_node
1979 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1981 /* Next, redistribute/reassociate the AND across the inner tests.
1982 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1983 if (TREE_CODE (inner1) == SSA_NAME
1984 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1985 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1986 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1987 gimple_assign_rhs1 (s),
1988 gimple_assign_rhs2 (s),
1989 code2, op2a, op2b)))
1991 /* Handle the AND case, where we are reassociating:
1992 (inner1 AND inner2) AND (op2a code2 op2b)
1993 => (t AND inner2)
1994 If the partial result t is a constant, we win. Otherwise
1995 continue on to try reassociating with the other inner test. */
1996 if (is_and)
1998 if (integer_onep (t))
1999 return inner2;
2000 else if (integer_zerop (t))
2001 return boolean_false_node;
2004 /* Handle the OR case, where we are redistributing:
2005 (inner1 OR inner2) AND (op2a code2 op2b)
2006 => (t OR (inner2 AND (op2a code2 op2b))) */
2007 else
2009 if (integer_onep (t))
2010 return boolean_true_node;
2011 else
2012 /* Save partial result for later. */
2013 partial = t;
2017 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
2018 if (TREE_CODE (inner2) == SSA_NAME
2019 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2020 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2021 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
2022 gimple_assign_rhs1 (s),
2023 gimple_assign_rhs2 (s),
2024 code2, op2a, op2b)))
2026 /* Handle the AND case, where we are reassociating:
2027 (inner1 AND inner2) AND (op2a code2 op2b)
2028 => (inner1 AND t) */
2029 if (is_and)
2031 if (integer_onep (t))
2032 return inner1;
2033 else if (integer_zerop (t))
2034 return boolean_false_node;
2037 /* Handle the OR case. where we are redistributing:
2038 (inner1 OR inner2) AND (op2a code2 op2b)
2039 => (t OR (inner1 AND (op2a code2 op2b)))
2040 => (t OR partial) */
2041 else
2043 if (integer_onep (t))
2044 return boolean_true_node;
2045 else if (partial)
2047 /* We already got a simplification for the other
2048 operand to the redistributed OR expression. The
2049 interesting case is when at least one is false.
2050 Or, if both are the same, we can apply the identity
2051 (x OR x) == x. */
2052 if (integer_zerop (partial))
2053 return t;
2054 else if (integer_zerop (t))
2055 return partial;
2056 else if (same_bool_result_p (t, partial))
2057 return t;
2062 return NULL_TREE;
2065 /* Try to simplify the AND of two comparisons defined by
2066 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2067 If this can be done without constructing an intermediate value,
2068 return the resulting tree; otherwise NULL_TREE is returned.
2069 This function is deliberately asymmetric as it recurses on SSA_DEFs
2070 in the first comparison but not the second. */
2072 static tree
2073 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2074 enum tree_code code2, tree op2a, tree op2b)
2076 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
2077 if (operand_equal_p (op1a, op2a, 0)
2078 && operand_equal_p (op1b, op2b, 0))
2080 tree t = combine_comparisons (UNKNOWN_LOCATION,
2081 TRUTH_ANDIF_EXPR, code1, code2,
2082 boolean_type_node, op1a, op1b);
2083 if (t)
2084 return t;
2087 /* Likewise the swapped case of the above. */
2088 if (operand_equal_p (op1a, op2b, 0)
2089 && operand_equal_p (op1b, op2a, 0))
2091 tree t = combine_comparisons (UNKNOWN_LOCATION,
2092 TRUTH_ANDIF_EXPR, code1,
2093 swap_tree_comparison (code2),
2094 boolean_type_node, op1a, op1b);
2095 if (t)
2096 return t;
2099 /* If both comparisons are of the same value against constants, we might
2100 be able to merge them. */
2101 if (operand_equal_p (op1a, op2a, 0)
2102 && TREE_CODE (op1b) == INTEGER_CST
2103 && TREE_CODE (op2b) == INTEGER_CST)
2105 int cmp = tree_int_cst_compare (op1b, op2b);
2107 /* If we have (op1a == op1b), we should either be able to
2108 return that or FALSE, depending on whether the constant op1b
2109 also satisfies the other comparison against op2b. */
2110 if (code1 == EQ_EXPR)
2112 bool done = true;
2113 bool val;
2114 switch (code2)
2116 case EQ_EXPR: val = (cmp == 0); break;
2117 case NE_EXPR: val = (cmp != 0); break;
2118 case LT_EXPR: val = (cmp < 0); break;
2119 case GT_EXPR: val = (cmp > 0); break;
2120 case LE_EXPR: val = (cmp <= 0); break;
2121 case GE_EXPR: val = (cmp >= 0); break;
2122 default: done = false;
2124 if (done)
2126 if (val)
2127 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2128 else
2129 return boolean_false_node;
2132 /* Likewise if the second comparison is an == comparison. */
2133 else if (code2 == EQ_EXPR)
2135 bool done = true;
2136 bool val;
2137 switch (code1)
2139 case EQ_EXPR: val = (cmp == 0); break;
2140 case NE_EXPR: val = (cmp != 0); break;
2141 case LT_EXPR: val = (cmp > 0); break;
2142 case GT_EXPR: val = (cmp < 0); break;
2143 case LE_EXPR: val = (cmp >= 0); break;
2144 case GE_EXPR: val = (cmp <= 0); break;
2145 default: done = false;
2147 if (done)
2149 if (val)
2150 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2151 else
2152 return boolean_false_node;
2156 /* Same business with inequality tests. */
2157 else if (code1 == NE_EXPR)
2159 bool val;
2160 switch (code2)
2162 case EQ_EXPR: val = (cmp != 0); break;
2163 case NE_EXPR: val = (cmp == 0); break;
2164 case LT_EXPR: val = (cmp >= 0); break;
2165 case GT_EXPR: val = (cmp <= 0); break;
2166 case LE_EXPR: val = (cmp > 0); break;
2167 case GE_EXPR: val = (cmp < 0); break;
2168 default:
2169 val = false;
2171 if (val)
2172 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2174 else if (code2 == NE_EXPR)
2176 bool val;
2177 switch (code1)
2179 case EQ_EXPR: val = (cmp == 0); break;
2180 case NE_EXPR: val = (cmp != 0); break;
2181 case LT_EXPR: val = (cmp <= 0); break;
2182 case GT_EXPR: val = (cmp >= 0); break;
2183 case LE_EXPR: val = (cmp < 0); break;
2184 case GE_EXPR: val = (cmp > 0); break;
2185 default:
2186 val = false;
2188 if (val)
2189 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2192 /* Chose the more restrictive of two < or <= comparisons. */
2193 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2194 && (code2 == LT_EXPR || code2 == LE_EXPR))
2196 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2197 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2198 else
2199 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2202 /* Likewise chose the more restrictive of two > or >= comparisons. */
2203 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2204 && (code2 == GT_EXPR || code2 == GE_EXPR))
2206 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2207 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2208 else
2209 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2212 /* Check for singleton ranges. */
2213 else if (cmp == 0
2214 && ((code1 == LE_EXPR && code2 == GE_EXPR)
2215 || (code1 == GE_EXPR && code2 == LE_EXPR)))
2216 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
2218 /* Check for disjoint ranges. */
2219 else if (cmp <= 0
2220 && (code1 == LT_EXPR || code1 == LE_EXPR)
2221 && (code2 == GT_EXPR || code2 == GE_EXPR))
2222 return boolean_false_node;
2223 else if (cmp >= 0
2224 && (code1 == GT_EXPR || code1 == GE_EXPR)
2225 && (code2 == LT_EXPR || code2 == LE_EXPR))
2226 return boolean_false_node;
2229 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2230 NAME's definition is a truth value. See if there are any simplifications
2231 that can be done against the NAME's definition. */
2232 if (TREE_CODE (op1a) == SSA_NAME
2233 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2234 && (integer_zerop (op1b) || integer_onep (op1b)))
2236 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2237 || (code1 == NE_EXPR && integer_onep (op1b)));
2238 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2239 switch (gimple_code (stmt))
2241 case GIMPLE_ASSIGN:
2242 /* Try to simplify by copy-propagating the definition. */
2243 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
2245 case GIMPLE_PHI:
2246 /* If every argument to the PHI produces the same result when
2247 ANDed with the second comparison, we win.
2248 Do not do this unless the type is bool since we need a bool
2249 result here anyway. */
2250 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2252 tree result = NULL_TREE;
2253 unsigned i;
2254 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2256 tree arg = gimple_phi_arg_def (stmt, i);
2258 /* If this PHI has itself as an argument, ignore it.
2259 If all the other args produce the same result,
2260 we're still OK. */
2261 if (arg == gimple_phi_result (stmt))
2262 continue;
2263 else if (TREE_CODE (arg) == INTEGER_CST)
2265 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
2267 if (!result)
2268 result = boolean_false_node;
2269 else if (!integer_zerop (result))
2270 return NULL_TREE;
2272 else if (!result)
2273 result = fold_build2 (code2, boolean_type_node,
2274 op2a, op2b);
2275 else if (!same_bool_comparison_p (result,
2276 code2, op2a, op2b))
2277 return NULL_TREE;
2279 else if (TREE_CODE (arg) == SSA_NAME)
2281 tree temp = and_var_with_comparison (arg, invert,
2282 code2, op2a, op2b);
2283 if (!temp)
2284 return NULL_TREE;
2285 else if (!result)
2286 result = temp;
2287 else if (!same_bool_result_p (result, temp))
2288 return NULL_TREE;
2290 else
2291 return NULL_TREE;
2293 return result;
2296 default:
2297 break;
2300 return NULL_TREE;
2303 /* Try to simplify the AND of two comparisons, specified by
2304 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2305 If this can be simplified to a single expression (without requiring
2306 introducing more SSA variables to hold intermediate values),
2307 return the resulting tree. Otherwise return NULL_TREE.
2308 If the result expression is non-null, it has boolean type. */
2310 tree
2311 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2312 enum tree_code code2, tree op2a, tree op2b)
2314 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2315 if (t)
2316 return t;
2317 else
2318 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2321 /* Helper function for or_comparisons_1: try to simplify the OR of the
2322 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2323 If INVERT is true, invert the value of VAR before doing the OR.
2324 Return NULL_EXPR if we can't simplify this to a single expression. */
2326 static tree
2327 or_var_with_comparison (tree var, bool invert,
2328 enum tree_code code2, tree op2a, tree op2b)
2330 tree t;
2331 gimple stmt = SSA_NAME_DEF_STMT (var);
2333 /* We can only deal with variables whose definitions are assignments. */
2334 if (!is_gimple_assign (stmt))
2335 return NULL_TREE;
2337 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2338 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2339 Then we only have to consider the simpler non-inverted cases. */
2340 if (invert)
2341 t = and_var_with_comparison_1 (stmt,
2342 invert_tree_comparison (code2, false),
2343 op2a, op2b);
2344 else
2345 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2346 return canonicalize_bool (t, invert);
2349 /* Try to simplify the OR of the ssa variable defined by the assignment
2350 STMT with the comparison specified by (OP2A CODE2 OP2B).
2351 Return NULL_EXPR if we can't simplify this to a single expression. */
2353 static tree
2354 or_var_with_comparison_1 (gimple stmt,
2355 enum tree_code code2, tree op2a, tree op2b)
2357 tree var = gimple_assign_lhs (stmt);
2358 tree true_test_var = NULL_TREE;
2359 tree false_test_var = NULL_TREE;
2360 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2362 /* Check for identities like (var OR (var != 0)) => true . */
2363 if (TREE_CODE (op2a) == SSA_NAME
2364 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2366 if ((code2 == NE_EXPR && integer_zerop (op2b))
2367 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2369 true_test_var = op2a;
2370 if (var == true_test_var)
2371 return var;
2373 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2374 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2376 false_test_var = op2a;
2377 if (var == false_test_var)
2378 return boolean_true_node;
2382 /* If the definition is a comparison, recurse on it. */
2383 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2385 tree t = or_comparisons_1 (innercode,
2386 gimple_assign_rhs1 (stmt),
2387 gimple_assign_rhs2 (stmt),
2388 code2,
2389 op2a,
2390 op2b);
2391 if (t)
2392 return t;
2395 /* If the definition is an AND or OR expression, we may be able to
2396 simplify by reassociating. */
2397 if (innercode == TRUTH_AND_EXPR
2398 || innercode == TRUTH_OR_EXPR
2399 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2400 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
2402 tree inner1 = gimple_assign_rhs1 (stmt);
2403 tree inner2 = gimple_assign_rhs2 (stmt);
2404 gimple s;
2405 tree t;
2406 tree partial = NULL_TREE;
2407 bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
2409 /* Check for boolean identities that don't require recursive examination
2410 of inner1/inner2:
2411 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2412 inner1 OR (inner1 AND inner2) => inner1
2413 !inner1 OR (inner1 OR inner2) => true
2414 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2416 if (inner1 == true_test_var)
2417 return (is_or ? var : inner1);
2418 else if (inner2 == true_test_var)
2419 return (is_or ? var : inner2);
2420 else if (inner1 == false_test_var)
2421 return (is_or
2422 ? boolean_true_node
2423 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2424 else if (inner2 == false_test_var)
2425 return (is_or
2426 ? boolean_true_node
2427 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2429 /* Next, redistribute/reassociate the OR across the inner tests.
2430 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2431 if (TREE_CODE (inner1) == SSA_NAME
2432 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2433 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2434 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2435 gimple_assign_rhs1 (s),
2436 gimple_assign_rhs2 (s),
2437 code2, op2a, op2b)))
2439 /* Handle the OR case, where we are reassociating:
2440 (inner1 OR inner2) OR (op2a code2 op2b)
2441 => (t OR inner2)
2442 If the partial result t is a constant, we win. Otherwise
2443 continue on to try reassociating with the other inner test. */
2444 if (innercode == TRUTH_OR_EXPR)
2446 if (integer_onep (t))
2447 return boolean_true_node;
2448 else if (integer_zerop (t))
2449 return inner2;
2452 /* Handle the AND case, where we are redistributing:
2453 (inner1 AND inner2) OR (op2a code2 op2b)
2454 => (t AND (inner2 OR (op2a code op2b))) */
2455 else
2457 if (integer_zerop (t))
2458 return boolean_false_node;
2459 else
2460 /* Save partial result for later. */
2461 partial = t;
2465 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2466 if (TREE_CODE (inner2) == SSA_NAME
2467 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2468 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2469 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2470 gimple_assign_rhs1 (s),
2471 gimple_assign_rhs2 (s),
2472 code2, op2a, op2b)))
2474 /* Handle the OR case, where we are reassociating:
2475 (inner1 OR inner2) OR (op2a code2 op2b)
2476 => (inner1 OR t) */
2477 if (innercode == TRUTH_OR_EXPR)
2479 if (integer_zerop (t))
2480 return inner1;
2481 else if (integer_onep (t))
2482 return boolean_true_node;
2485 /* Handle the AND case, where we are redistributing:
2486 (inner1 AND inner2) OR (op2a code2 op2b)
2487 => (t AND (inner1 OR (op2a code2 op2b)))
2488 => (t AND partial) */
2489 else
2491 if (integer_zerop (t))
2492 return boolean_false_node;
2493 else if (partial)
2495 /* We already got a simplification for the other
2496 operand to the redistributed AND expression. The
2497 interesting case is when at least one is true.
2498 Or, if both are the same, we can apply the identity
2499 (x AND x) == true. */
2500 if (integer_onep (partial))
2501 return t;
2502 else if (integer_onep (t))
2503 return partial;
2504 else if (same_bool_result_p (t, partial))
2505 return boolean_true_node;
2510 return NULL_TREE;
2513 /* Try to simplify the OR of two comparisons defined by
2514 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2515 If this can be done without constructing an intermediate value,
2516 return the resulting tree; otherwise NULL_TREE is returned.
2517 This function is deliberately asymmetric as it recurses on SSA_DEFs
2518 in the first comparison but not the second. */
2520 static tree
2521 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2522 enum tree_code code2, tree op2a, tree op2b)
2524 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2525 if (operand_equal_p (op1a, op2a, 0)
2526 && operand_equal_p (op1b, op2b, 0))
2528 tree t = combine_comparisons (UNKNOWN_LOCATION,
2529 TRUTH_ORIF_EXPR, code1, code2,
2530 boolean_type_node, op1a, op1b);
2531 if (t)
2532 return t;
2535 /* Likewise the swapped case of the above. */
2536 if (operand_equal_p (op1a, op2b, 0)
2537 && operand_equal_p (op1b, op2a, 0))
2539 tree t = combine_comparisons (UNKNOWN_LOCATION,
2540 TRUTH_ORIF_EXPR, code1,
2541 swap_tree_comparison (code2),
2542 boolean_type_node, op1a, op1b);
2543 if (t)
2544 return t;
2547 /* If both comparisons are of the same value against constants, we might
2548 be able to merge them. */
2549 if (operand_equal_p (op1a, op2a, 0)
2550 && TREE_CODE (op1b) == INTEGER_CST
2551 && TREE_CODE (op2b) == INTEGER_CST)
2553 int cmp = tree_int_cst_compare (op1b, op2b);
/* CMP orders the two constants: negative, zero or positive as op1b is
   less than, equal to, or greater than op2b.  Every branch below keys
   off this single comparison.  */
2555 /* If we have (op1a != op1b), we should either be able to
2556 return that or TRUE, depending on whether the constant op1b
2557 also satisfies the other comparison against op2b. */
2558 if (code1 == NE_EXPR)
2560 bool done = true;
2561 bool val;
/* VAL is whether the single excluded value op1b itself satisfies
   (x code2 op2b); if so the two tests jointly cover every value.
   DONE is false for comparison codes we don't handle (e.g. unordered
   float comparisons).  */
2562 switch (code2)
2564 case EQ_EXPR: val = (cmp == 0); break;
2565 case NE_EXPR: val = (cmp != 0); break;
2566 case LT_EXPR: val = (cmp < 0); break;
2567 case GT_EXPR: val = (cmp > 0); break;
2568 case LE_EXPR: val = (cmp <= 0); break;
2569 case GE_EXPR: val = (cmp >= 0); break;
2570 default: done = false;
2572 if (done)
2574 if (val)
2575 return boolean_true_node;
2576 else
2577 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2580 /* Likewise if the second comparison is a != comparison. */
2581 else if (code2 == NE_EXPR)
2583 bool done = true;
2584 bool val;
2585 switch (code1)
2587 case EQ_EXPR: val = (cmp == 0); break;
2588 case NE_EXPR: val = (cmp != 0); break;
2589 case LT_EXPR: val = (cmp > 0); break;
2590 case GT_EXPR: val = (cmp < 0); break;
2591 case LE_EXPR: val = (cmp >= 0); break;
2592 case GE_EXPR: val = (cmp <= 0); break;
2593 default: done = false;
2595 if (done)
2597 if (val)
2598 return boolean_true_node;
2599 else
2600 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2604 /* See if an equality test is redundant with the other comparison. */
2605 else if (code1 == EQ_EXPR)
2607 bool val;
2608 switch (code2)
2610 case EQ_EXPR: val = (cmp == 0); break;
2611 case NE_EXPR: val = (cmp != 0); break;
2612 case LT_EXPR: val = (cmp < 0); break;
2613 case GT_EXPR: val = (cmp > 0); break;
2614 case LE_EXPR: val = (cmp <= 0); break;
2615 case GE_EXPR: val = (cmp >= 0); break;
2616 default:
2617 val = false;
/* If op1b already satisfies code2, (x == op1b) is subsumed by the
   second comparison, so the OR reduces to it.  Unhandled codes fall
   through with VAL false and simplify nothing.  */
2619 if (val)
2620 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2622 else if (code2 == EQ_EXPR)
2624 bool val;
2625 switch (code1)
2627 case EQ_EXPR: val = (cmp == 0); break;
2628 case NE_EXPR: val = (cmp != 0); break;
2629 case LT_EXPR: val = (cmp > 0); break;
2630 case GT_EXPR: val = (cmp < 0); break;
2631 case LE_EXPR: val = (cmp >= 0); break;
2632 case GE_EXPR: val = (cmp <= 0); break;
2633 default:
2634 val = false;
2636 if (val)
2637 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2640 /* Choose the less restrictive of two < or <= comparisons. */
2641 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2642 && (code2 == LT_EXPR || code2 == LE_EXPR))
2644 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2645 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2646 else
2647 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2650 /* Likewise choose the less restrictive of two > or >= comparisons. */
2651 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2652 && (code2 == GT_EXPR || code2 == GE_EXPR))
2654 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2655 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2656 else
2657 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2660 /* Check for singleton ranges. */
2661 else if (cmp == 0
2662 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2663 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2664 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2666 /* Check for less/greater pairs that don't restrict the range at all. */
2667 else if (cmp >= 0
2668 && (code1 == LT_EXPR || code1 == LE_EXPR)
2669 && (code2 == GT_EXPR || code2 == GE_EXPR))
2670 return boolean_true_node;
2671 else if (cmp <= 0
2672 && (code1 == GT_EXPR || code1 == GE_EXPR)
2673 && (code2 == LT_EXPR || code2 == LE_EXPR))
2674 return boolean_true_node;
2677 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2678 NAME's definition is a truth value. See if there are any simplifications
2679 that can be done against the NAME's definition. */
2680 if (TREE_CODE (op1a) == SSA_NAME
2681 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2682 && (integer_zerop (op1b) || integer_onep (op1b)))
2684 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2685 || (code1 == NE_EXPR && integer_onep (op1b)));
/* INVERT is true when the first comparison tests that NAME is false.  */
2686 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2687 switch (gimple_code (stmt))
2689 case GIMPLE_ASSIGN:
2690 /* Try to simplify by copy-propagating the definition. */
2691 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2693 case GIMPLE_PHI:
2694 /* If every argument to the PHI produces the same result when
2695 ORed with the second comparison, we win.
2696 Do not do this unless the type is bool since we need a bool
2697 result here anyway. */
2698 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2700 tree result = NULL_TREE;
2701 unsigned i;
2702 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2704 tree arg = gimple_phi_arg_def (stmt, i);
2706 /* If this PHI has itself as an argument, ignore it.
2707 If all the other args produce the same result,
2708 we're still OK. */
2709 if (arg == gimple_phi_result (stmt))
2710 continue;
2711 else if (TREE_CODE (arg) == INTEGER_CST)
/* A constant argument makes (NAME code1 op1b) a known truth value on
   that incoming edge: true forces the whole OR to TRUE, false leaves
   just the second comparison.  All edges must agree, else give up.  */
2713 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2715 if (!result)
2716 result = boolean_true_node;
2717 else if (!integer_onep (result))
2718 return NULL_TREE;
2720 else if (!result)
2721 result = fold_build2 (code2, boolean_type_node,
2722 op2a, op2b);
2723 else if (!same_bool_comparison_p (result,
2724 code2, op2a, op2b))
2725 return NULL_TREE;
2727 else if (TREE_CODE (arg) == SSA_NAME)
2729 tree temp = or_var_with_comparison (arg, invert,
2730 code2, op2a, op2b);
2731 if (!temp)
2732 return NULL_TREE;
2733 else if (!result)
2734 result = temp;
2735 else if (!same_bool_result_p (result, temp))
2736 return NULL_TREE;
2738 else
2739 return NULL_TREE;
2741 return result;
2744 default:
2745 break;
2748 return NULL_TREE;
2751 /* Try to simplify the OR of two comparisons, specified by
2752 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2753 If this can be simplified to a single expression (without requiring
2754 introducing more SSA variables to hold intermediate values),
2755 return the resulting tree. Otherwise return NULL_TREE.
2756 If the result expression is non-null, it has boolean type. */
2758 tree
2759 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2760 enum tree_code code2, tree op2a, tree op2b)
2762 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2763 if (t)
2764 return t;
2765 else
2766 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);