/* Snapshot provenance: official-gcc/alias-decl.git, gcc/gimple-fold.c,
   blob a5301bad5ed726cb0089bdaba016d118b8afb51c, dated 2010-07-27
   (changelog entry: Paolo Carlini <paolo.carlini@oracle.com>).  */
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
32 #include "target.h"
35 /* If SYM is a constant variable with known value, return the value.
36 NULL_TREE is returned otherwise. */
38 tree
39 get_symbol_constant_value (tree sym)
41 if (TREE_STATIC (sym)
42 && (TREE_READONLY (sym)
43 || TREE_CODE (sym) == CONST_DECL))
45 tree val = DECL_INITIAL (sym);
46 if (val)
48 STRIP_NOPS (val);
49 if (is_gimple_min_invariant (val))
51 if (TREE_CODE (val) == ADDR_EXPR)
53 tree base = get_base_address (TREE_OPERAND (val, 0));
54 if (base && TREE_CODE (base) == VAR_DECL)
56 TREE_ADDRESSABLE (base) = 1;
57 if (gimple_referenced_vars (cfun))
58 add_referenced_var (base);
61 return val;
64 /* Variables declared 'const' without an initializer
65 have zero as the initializer if they may not be
66 overridden at link or run time. */
67 if (!val
68 && !DECL_EXTERNAL (sym)
69 && targetm.binds_local_p (sym)
70 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
71 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
72 return fold_convert (TREE_TYPE (sym), integer_zero_node);
75 return NULL_TREE;
79 /* Return true if we may propagate the address expression ADDR into the
80 dereference DEREF and cancel them. */
82 bool
83 may_propagate_address_into_dereference (tree addr, tree deref)
85 gcc_assert (TREE_CODE (deref) == MEM_REF
86 && TREE_CODE (addr) == ADDR_EXPR);
88 /* Don't propagate if ADDR's operand has incomplete type. */
89 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
90 return false;
92 /* If the address is invariant then we do not need to preserve restrict
93 qualifications. But we do need to preserve volatile qualifiers until
94 we can annotate the folded dereference itself properly. */
95 if (is_gimple_min_invariant (addr)
96 && (!TREE_THIS_VOLATILE (deref)
97 || TYPE_VOLATILE (TREE_TYPE (addr))))
98 return useless_type_conversion_p (TREE_TYPE (deref),
99 TREE_TYPE (TREE_OPERAND (addr, 0)));
101 /* Else both the address substitution and the folding must result in
102 a valid useless type conversion sequence. */
103 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
104 TREE_TYPE (addr))
105 && useless_type_conversion_p (TREE_TYPE (deref),
106 TREE_TYPE (TREE_OPERAND (addr, 0))));
110 /* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
111 BASE is an array type. OFFSET is a byte displacement.
113 LOC is the location of the original expression. */
115 static tree
116 maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
118 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
119 tree array_type, elt_type, elt_size;
120 tree domain_type;
122 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
123 measured in units of the size of elements type) from that ARRAY_REF).
124 We can't do anything if either is variable.
126 The case we handle here is *(&A[N]+O). */
127 if (TREE_CODE (base) == ARRAY_REF)
129 tree low_bound = array_ref_low_bound (base);
131 elt_offset = TREE_OPERAND (base, 1);
132 if (TREE_CODE (low_bound) != INTEGER_CST
133 || TREE_CODE (elt_offset) != INTEGER_CST)
134 return NULL_TREE;
136 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
137 base = TREE_OPERAND (base, 0);
140 /* Ignore stupid user tricks of indexing non-array variables. */
141 array_type = TREE_TYPE (base);
142 if (TREE_CODE (array_type) != ARRAY_TYPE)
143 return NULL_TREE;
144 elt_type = TREE_TYPE (array_type);
146 /* Use signed size type for intermediate computation on the index. */
147 idx_type = ssizetype;
149 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
150 element type (so we can use the alignment if it's not constant).
151 Otherwise, compute the offset as an index by using a division. If the
152 division isn't exact, then don't do anything. */
153 elt_size = TYPE_SIZE_UNIT (elt_type);
154 if (!elt_size)
155 return NULL;
156 if (integer_zerop (offset))
158 if (TREE_CODE (elt_size) != INTEGER_CST)
159 elt_size = size_int (TYPE_ALIGN (elt_type));
161 idx = build_int_cst (idx_type, 0);
163 else
165 unsigned HOST_WIDE_INT lquo, lrem;
166 HOST_WIDE_INT hquo, hrem;
167 double_int soffset;
169 /* The final array offset should be signed, so we need
170 to sign-extend the (possibly pointer) offset here
171 and use signed division. */
172 soffset = double_int_sext (tree_to_double_int (offset),
173 TYPE_PRECISION (TREE_TYPE (offset)));
174 if (TREE_CODE (elt_size) != INTEGER_CST
175 || div_and_round_double (TRUNC_DIV_EXPR, 0,
176 soffset.low, soffset.high,
177 TREE_INT_CST_LOW (elt_size),
178 TREE_INT_CST_HIGH (elt_size),
179 &lquo, &hquo, &lrem, &hrem)
180 || lrem || hrem)
181 return NULL_TREE;
183 idx = build_int_cst_wide (idx_type, lquo, hquo);
186 /* Assume the low bound is zero. If there is a domain type, get the
187 low bound, if any, convert the index into that type, and add the
188 low bound. */
189 min_idx = build_int_cst (idx_type, 0);
190 domain_type = TYPE_DOMAIN (array_type);
191 if (domain_type)
193 idx_type = domain_type;
194 if (TYPE_MIN_VALUE (idx_type))
195 min_idx = TYPE_MIN_VALUE (idx_type);
196 else
197 min_idx = fold_convert (idx_type, min_idx);
199 if (TREE_CODE (min_idx) != INTEGER_CST)
200 return NULL_TREE;
202 elt_offset = fold_convert (idx_type, elt_offset);
205 if (!integer_zerop (min_idx))
206 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
207 if (!integer_zerop (elt_offset))
208 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
210 /* Make sure to possibly truncate late after offsetting. */
211 idx = fold_convert (idx_type, idx);
213 /* We don't want to construct access past array bounds. For example
214 char *(c[4]);
215 c[3][2];
216 should not be simplified into (*c)[14] or tree-vrp will
217 give false warnings.
218 This is only an issue for multi-dimensional arrays. */
219 if (TREE_CODE (elt_type) == ARRAY_TYPE
220 && domain_type)
222 if (TYPE_MAX_VALUE (domain_type)
223 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
224 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
225 return NULL_TREE;
226 else if (TYPE_MIN_VALUE (domain_type)
227 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
228 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
229 return NULL_TREE;
230 else if (compare_tree_int (idx, 0) < 0)
231 return NULL_TREE;
235 tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
236 SET_EXPR_LOCATION (t, loc);
237 return t;
242 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
243 LOC is the location of original expression.
245 Before attempting the conversion strip off existing ADDR_EXPRs. */
247 tree
248 maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
249 tree orig_type)
251 tree ret;
253 STRIP_NOPS (base);
254 if (TREE_CODE (base) != ADDR_EXPR)
255 return NULL_TREE;
257 base = TREE_OPERAND (base, 0);
258 if (types_compatible_p (orig_type, TREE_TYPE (base))
259 && integer_zerop (offset))
260 return base;
262 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
263 if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
264 return ret;
265 return NULL_TREE;
268 /* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
269 LOC is the location of the original expression. */
271 tree
272 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
273 tree orig_type)
275 tree base, ret;
277 STRIP_NOPS (addr);
278 if (TREE_CODE (addr) != ADDR_EXPR)
279 return NULL_TREE;
280 base = TREE_OPERAND (addr, 0);
281 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
282 if (ret)
284 ret = build_fold_addr_expr (ret);
285 if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
286 return NULL_TREE;
287 SET_EXPR_LOCATION (ret, loc);
290 return ret;
294 /* A quaint feature extant in our address arithmetic is that there
295 can be hidden type changes here. The type of the result need
296 not be the same as the type of the input pointer.
298 What we're after here is an expression of the form
299 (T *)(&array + const)
300 where array is OP0, const is OP1, RES_TYPE is T and
301 the cast doesn't actually exist, but is implicit in the
302 type of the POINTER_PLUS_EXPR. We'd like to turn this into
303 &array[x]
304 which may be able to propagate further. */
306 tree
307 maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
309 tree ptd_type;
310 tree t;
312 /* The first operand should be an ADDR_EXPR. */
313 if (TREE_CODE (op0) != ADDR_EXPR)
314 return NULL_TREE;
315 op0 = TREE_OPERAND (op0, 0);
317 /* It had better be a constant. */
318 if (TREE_CODE (op1) != INTEGER_CST)
320 /* Or op0 should now be A[0] and the non-constant offset defined
321 via a multiplication by the array element size. */
322 if (TREE_CODE (op0) == ARRAY_REF
323 /* As we will end up creating a variable index array access
324 in the outermost array dimension make sure there isn't
325 a more inner array that the index could overflow to. */
326 && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
327 && integer_zerop (TREE_OPERAND (op0, 1))
328 && TREE_CODE (op1) == SSA_NAME)
330 gimple offset_def = SSA_NAME_DEF_STMT (op1);
331 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
332 if (!host_integerp (elsz, 1)
333 || !is_gimple_assign (offset_def))
334 return NULL_TREE;
336 /* Do not build array references of something that we can't
337 see the true number of array dimensions for. */
338 if (!DECL_P (TREE_OPERAND (op0, 0))
339 && !handled_component_p (TREE_OPERAND (op0, 0)))
340 return NULL_TREE;
342 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
343 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
344 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
345 return build_fold_addr_expr
346 (build4 (ARRAY_REF, TREE_TYPE (op0),
347 TREE_OPERAND (op0, 0),
348 gimple_assign_rhs1 (offset_def),
349 TREE_OPERAND (op0, 2),
350 TREE_OPERAND (op0, 3)));
351 else if (integer_onep (elsz)
352 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
353 return build_fold_addr_expr
354 (build4 (ARRAY_REF, TREE_TYPE (op0),
355 TREE_OPERAND (op0, 0),
356 op1,
357 TREE_OPERAND (op0, 2),
358 TREE_OPERAND (op0, 3)));
360 else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
361 /* Dto. */
362 && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
363 && TREE_CODE (op1) == SSA_NAME)
365 gimple offset_def = SSA_NAME_DEF_STMT (op1);
366 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
367 if (!host_integerp (elsz, 1)
368 || !is_gimple_assign (offset_def))
369 return NULL_TREE;
371 /* Do not build array references of something that we can't
372 see the true number of array dimensions for. */
373 if (!DECL_P (op0)
374 && !handled_component_p (op0))
375 return NULL_TREE;
377 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
378 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
379 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
380 return build_fold_addr_expr
381 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
382 op0, gimple_assign_rhs1 (offset_def),
383 integer_zero_node, NULL_TREE));
384 else if (integer_onep (elsz)
385 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
386 return build_fold_addr_expr
387 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
388 op0, op1,
389 integer_zero_node, NULL_TREE));
392 return NULL_TREE;
395 /* If the first operand is an ARRAY_REF, expand it so that we can fold
396 the offset into it. */
397 while (TREE_CODE (op0) == ARRAY_REF)
399 tree array_obj = TREE_OPERAND (op0, 0);
400 tree array_idx = TREE_OPERAND (op0, 1);
401 tree elt_type = TREE_TYPE (op0);
402 tree elt_size = TYPE_SIZE_UNIT (elt_type);
403 tree min_idx;
405 if (TREE_CODE (array_idx) != INTEGER_CST)
406 break;
407 if (TREE_CODE (elt_size) != INTEGER_CST)
408 break;
410 /* Un-bias the index by the min index of the array type. */
411 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
412 if (min_idx)
414 min_idx = TYPE_MIN_VALUE (min_idx);
415 if (min_idx)
417 if (TREE_CODE (min_idx) != INTEGER_CST)
418 break;
420 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
421 if (!integer_zerop (min_idx))
422 array_idx = int_const_binop (MINUS_EXPR, array_idx,
423 min_idx, 0);
427 /* Convert the index to a byte offset. */
428 array_idx = fold_convert (sizetype, array_idx);
429 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
431 /* Update the operands for the next round, or for folding. */
432 op1 = int_const_binop (PLUS_EXPR,
433 array_idx, op1, 0);
434 op0 = array_obj;
437 ptd_type = TREE_TYPE (res_type);
438 /* If we want a pointer to void, reconstruct the reference from the
439 array element type. A pointer to that can be trivially converted
440 to void *. This happens as we fold (void *)(ptr p+ off). */
441 if (VOID_TYPE_P (ptd_type)
442 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
443 ptd_type = TREE_TYPE (TREE_TYPE (op0));
445 /* At which point we can try some of the same things as for indirects. */
446 t = maybe_fold_offset_to_array_ref (loc, op0, op1);
447 if (t)
449 t = build_fold_addr_expr (t);
450 if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
451 return NULL_TREE;
452 SET_EXPR_LOCATION (t, loc);
455 return t;
458 /* Subroutine of fold_stmt. We perform several simplifications of the
459 memory reference tree EXPR and make sure to re-gimplify them properly
460 after propagation of constant addresses. IS_LHS is true if the
461 reference is supposed to be an lvalue. */
463 static tree
464 maybe_fold_reference (tree expr, bool is_lhs)
466 tree *t = &expr;
468 if (TREE_CODE (expr) == ARRAY_REF
469 && !is_lhs)
471 tree tem = fold_read_from_constant_string (expr);
472 if (tem)
473 return tem;
476 /* ??? We might want to open-code the relevant remaining cases
477 to avoid using the generic fold. */
478 if (handled_component_p (*t)
479 && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
481 tree tem = fold (*t);
482 if (tem != *t)
483 return tem;
486 while (handled_component_p (*t))
487 t = &TREE_OPERAND (*t, 0);
489 /* Fold back MEM_REFs to reference trees. */
490 if (TREE_CODE (*t) == MEM_REF
491 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
492 && integer_zerop (TREE_OPERAND (*t, 1))
493 && (TREE_THIS_VOLATILE (*t)
494 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
495 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
496 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
497 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
498 /* We have to look out here to not drop a required conversion
499 from the rhs to the lhs if is_lhs, but we don't have the
500 rhs here to verify that. Thus require strict type
501 compatibility. */
502 && types_compatible_p (TREE_TYPE (*t),
503 TREE_TYPE (TREE_OPERAND
504 (TREE_OPERAND (*t, 0), 0))))
506 tree tem;
507 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
508 tem = maybe_fold_reference (expr, is_lhs);
509 if (tem)
510 return tem;
511 return expr;
513 /* Canonicalize MEM_REFs invariant address operand. */
514 else if (TREE_CODE (*t) == MEM_REF
515 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
516 && !DECL_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))
517 && !CONSTANT_CLASS_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
519 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
520 TREE_OPERAND (*t, 0),
521 TREE_OPERAND (*t, 1));
522 if (tem)
524 *t = tem;
525 tem = maybe_fold_reference (expr, is_lhs);
526 if (tem)
527 return tem;
528 return expr;
531 else if (!is_lhs
532 && DECL_P (*t))
534 tree tem = get_symbol_constant_value (*t);
535 if (tem
536 && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
538 *t = unshare_expr (tem);
539 tem = maybe_fold_reference (expr, is_lhs);
540 if (tem)
541 return tem;
542 return expr;
546 return NULL_TREE;
550 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
551 replacement rhs for the statement or NULL_TREE if no simplification
552 could be made. It is assumed that the operands have been previously
553 folded. */
555 static tree
556 fold_gimple_assign (gimple_stmt_iterator *si)
558 gimple stmt = gsi_stmt (*si);
559 enum tree_code subcode = gimple_assign_rhs_code (stmt);
560 location_t loc = gimple_location (stmt);
562 tree result = NULL_TREE;
564 switch (get_gimple_rhs_class (subcode))
566 case GIMPLE_SINGLE_RHS:
568 tree rhs = gimple_assign_rhs1 (stmt);
570 /* Try to fold a conditional expression. */
571 if (TREE_CODE (rhs) == COND_EXPR)
573 tree op0 = COND_EXPR_COND (rhs);
574 tree tem;
575 bool set = false;
576 location_t cond_loc = EXPR_LOCATION (rhs);
578 if (COMPARISON_CLASS_P (op0))
580 fold_defer_overflow_warnings ();
581 tem = fold_binary_loc (cond_loc,
582 TREE_CODE (op0), TREE_TYPE (op0),
583 TREE_OPERAND (op0, 0),
584 TREE_OPERAND (op0, 1));
585 /* This is actually a conditional expression, not a GIMPLE
586 conditional statement, however, the valid_gimple_rhs_p
587 test still applies. */
588 set = (tem && is_gimple_condexpr (tem)
589 && valid_gimple_rhs_p (tem));
590 fold_undefer_overflow_warnings (set, stmt, 0);
592 else if (is_gimple_min_invariant (op0))
594 tem = op0;
595 set = true;
597 else
598 return NULL_TREE;
600 if (set)
601 result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
602 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
605 else if (TREE_CODE (rhs) == TARGET_MEM_REF)
606 return maybe_fold_tmr (rhs);
608 else if (REFERENCE_CLASS_P (rhs))
609 return maybe_fold_reference (rhs, false);
611 else if (TREE_CODE (rhs) == ADDR_EXPR)
613 tree ref = TREE_OPERAND (rhs, 0);
614 tree tem = maybe_fold_reference (ref, true);
615 if (tem
616 && TREE_CODE (tem) == MEM_REF
617 && integer_zerop (TREE_OPERAND (tem, 1)))
618 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
619 else if (tem)
620 result = fold_convert (TREE_TYPE (rhs),
621 build_fold_addr_expr_loc (loc, tem));
622 else if (TREE_CODE (ref) == MEM_REF
623 && integer_zerop (TREE_OPERAND (ref, 1)))
624 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
627 else if (TREE_CODE (rhs) == CONSTRUCTOR
628 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
629 && (CONSTRUCTOR_NELTS (rhs)
630 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
632 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
633 unsigned i;
634 tree val;
636 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
637 if (TREE_CODE (val) != INTEGER_CST
638 && TREE_CODE (val) != REAL_CST
639 && TREE_CODE (val) != FIXED_CST)
640 return NULL_TREE;
642 return build_vector_from_ctor (TREE_TYPE (rhs),
643 CONSTRUCTOR_ELTS (rhs));
646 else if (DECL_P (rhs))
647 return unshare_expr (get_symbol_constant_value (rhs));
649 /* If we couldn't fold the RHS, hand over to the generic
650 fold routines. */
651 if (result == NULL_TREE)
652 result = fold (rhs);
654 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
655 that may have been added by fold, and "useless" type
656 conversions that might now be apparent due to propagation. */
657 STRIP_USELESS_TYPE_CONVERSION (result);
659 if (result != rhs && valid_gimple_rhs_p (result))
660 return result;
662 return NULL_TREE;
664 break;
666 case GIMPLE_UNARY_RHS:
668 tree rhs = gimple_assign_rhs1 (stmt);
670 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
671 if (result)
673 /* If the operation was a conversion do _not_ mark a
674 resulting constant with TREE_OVERFLOW if the original
675 constant was not. These conversions have implementation
676 defined behavior and retaining the TREE_OVERFLOW flag
677 here would confuse later passes such as VRP. */
678 if (CONVERT_EXPR_CODE_P (subcode)
679 && TREE_CODE (result) == INTEGER_CST
680 && TREE_CODE (rhs) == INTEGER_CST)
681 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
683 STRIP_USELESS_TYPE_CONVERSION (result);
684 if (valid_gimple_rhs_p (result))
685 return result;
687 else if (CONVERT_EXPR_CODE_P (subcode)
688 && POINTER_TYPE_P (gimple_expr_type (stmt))
689 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
691 tree type = gimple_expr_type (stmt);
692 tree t = maybe_fold_offset_to_address (loc,
693 gimple_assign_rhs1 (stmt),
694 integer_zero_node, type);
695 if (t)
696 return t;
699 break;
701 case GIMPLE_BINARY_RHS:
702 /* Try to fold pointer addition. */
703 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
705 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
706 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
708 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
709 if (!useless_type_conversion_p
710 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
711 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
713 result = maybe_fold_stmt_addition (gimple_location (stmt),
714 type,
715 gimple_assign_rhs1 (stmt),
716 gimple_assign_rhs2 (stmt));
719 if (!result)
720 result = fold_binary_loc (loc, subcode,
721 TREE_TYPE (gimple_assign_lhs (stmt)),
722 gimple_assign_rhs1 (stmt),
723 gimple_assign_rhs2 (stmt));
725 if (result)
727 STRIP_USELESS_TYPE_CONVERSION (result);
728 if (valid_gimple_rhs_p (result))
729 return result;
731 /* Fold might have produced non-GIMPLE, so if we trust it blindly
732 we lose canonicalization opportunities. Do not go again
733 through fold here though, or the same non-GIMPLE will be
734 produced. */
735 if (commutative_tree_code (subcode)
736 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
737 gimple_assign_rhs2 (stmt), false))
738 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
739 gimple_assign_rhs2 (stmt),
740 gimple_assign_rhs1 (stmt));
742 break;
744 case GIMPLE_TERNARY_RHS:
745 result = fold_ternary_loc (loc, subcode,
746 TREE_TYPE (gimple_assign_lhs (stmt)),
747 gimple_assign_rhs1 (stmt),
748 gimple_assign_rhs2 (stmt),
749 gimple_assign_rhs3 (stmt));
751 if (result)
753 STRIP_USELESS_TYPE_CONVERSION (result);
754 if (valid_gimple_rhs_p (result))
755 return result;
757 /* Fold might have produced non-GIMPLE, so if we trust it blindly
758 we lose canonicalization opportunities. Do not go again
759 through fold here though, or the same non-GIMPLE will be
760 produced. */
761 if (commutative_ternary_tree_code (subcode)
762 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
763 gimple_assign_rhs2 (stmt), false))
764 return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
765 gimple_assign_rhs2 (stmt),
766 gimple_assign_rhs1 (stmt),
767 gimple_assign_rhs3 (stmt));
769 break;
771 case GIMPLE_INVALID_RHS:
772 gcc_unreachable ();
775 return NULL_TREE;
778 /* Attempt to fold a conditional statement. Return true if any changes were
779 made. We only attempt to fold the condition expression, and do not perform
780 any transformation that would require alteration of the cfg. It is
781 assumed that the operands have been previously folded. */
783 static bool
784 fold_gimple_cond (gimple stmt)
786 tree result = fold_binary_loc (gimple_location (stmt),
787 gimple_cond_code (stmt),
788 boolean_type_node,
789 gimple_cond_lhs (stmt),
790 gimple_cond_rhs (stmt));
792 if (result)
794 STRIP_USELESS_TYPE_CONVERSION (result);
795 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
797 gimple_cond_set_condition_from_tree (stmt, result);
798 return true;
802 return false;
805 /* Convert EXPR into a GIMPLE value suitable for substitution on the
806 RHS of an assignment. Insert the necessary statements before
807 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
808 is replaced. If the call is expected to produces a result, then it
809 is replaced by an assignment of the new RHS to the result variable.
810 If the result is to be ignored, then the call is replaced by a
811 GIMPLE_NOP. A proper VDEF chain is retained by making the first
812 VUSE and the last VDEF of the whole sequence be the same as the replaced
813 statement and using new SSA names for stores in between. */
815 void
816 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
818 tree lhs;
819 tree tmp = NULL_TREE; /* Silence warning. */
820 gimple stmt, new_stmt;
821 gimple_stmt_iterator i;
822 gimple_seq stmts = gimple_seq_alloc();
823 struct gimplify_ctx gctx;
824 gimple last = NULL;
825 gimple laststore = NULL;
826 tree reaching_vuse;
828 stmt = gsi_stmt (*si_p);
830 gcc_assert (is_gimple_call (stmt));
832 lhs = gimple_call_lhs (stmt);
833 reaching_vuse = gimple_vuse (stmt);
835 push_gimplify_context (&gctx);
837 if (lhs == NULL_TREE)
838 gimplify_and_add (expr, &stmts);
839 else
840 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
842 pop_gimplify_context (NULL);
844 if (gimple_has_location (stmt))
845 annotate_all_with_location (stmts, gimple_location (stmt));
847 /* The replacement can expose previously unreferenced variables. */
848 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
850 if (last)
852 gsi_insert_before (si_p, last, GSI_NEW_STMT);
853 gsi_next (si_p);
855 new_stmt = gsi_stmt (i);
856 if (gimple_in_ssa_p (cfun))
858 find_new_referenced_vars (new_stmt);
859 mark_symbols_for_renaming (new_stmt);
861 /* If the new statement has a VUSE, update it with exact SSA name we
862 know will reach this one. */
863 if (gimple_vuse (new_stmt))
865 /* If we've also seen a previous store create a new VDEF for
866 the latter one, and make that the new reaching VUSE. */
867 if (laststore)
869 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
870 gimple_set_vdef (laststore, reaching_vuse);
871 update_stmt (laststore);
872 laststore = NULL;
874 gimple_set_vuse (new_stmt, reaching_vuse);
875 gimple_set_modified (new_stmt, true);
877 if (gimple_assign_single_p (new_stmt)
878 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
880 laststore = new_stmt;
882 last = new_stmt;
885 if (lhs == NULL_TREE)
887 /* If we replace a call without LHS that has a VDEF and our new
888 sequence ends with a store we must make that store have the same
889 vdef in order not to break the sequencing. This can happen
890 for instance when folding memcpy calls into assignments. */
891 if (gimple_vdef (stmt) && laststore)
893 gimple_set_vdef (laststore, gimple_vdef (stmt));
894 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
895 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
896 update_stmt (laststore);
898 else if (gimple_in_ssa_p (cfun))
900 unlink_stmt_vdef (stmt);
901 release_defs (stmt);
903 new_stmt = last;
905 else
907 if (last)
909 gsi_insert_before (si_p, last, GSI_NEW_STMT);
910 gsi_next (si_p);
912 if (laststore && is_gimple_reg (lhs))
914 gimple_set_vdef (laststore, gimple_vdef (stmt));
915 update_stmt (laststore);
916 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
917 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
918 laststore = NULL;
920 else if (laststore)
922 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
923 gimple_set_vdef (laststore, reaching_vuse);
924 update_stmt (laststore);
925 laststore = NULL;
927 new_stmt = gimple_build_assign (lhs, tmp);
928 if (!is_gimple_reg (tmp))
929 gimple_set_vuse (new_stmt, reaching_vuse);
930 if (!is_gimple_reg (lhs))
932 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
933 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
934 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
938 gimple_set_location (new_stmt, gimple_location (stmt));
939 gsi_replace (si_p, new_stmt, false);
942 /* Return the string length, maximum string length or maximum value of
943 ARG in LENGTH.
944 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
945 is not NULL and, for TYPE == 0, its value is not equal to the length
946 we determine or if we are unable to determine the length or value,
947 return false. VISITED is a bitmap of visited variables.
948 TYPE is 0 if string length should be returned, 1 for maximum string
949 length and 2 for maximum value ARG can have. */
951 static bool
952 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
954 tree var, val;
955 gimple def_stmt;
957 if (TREE_CODE (arg) != SSA_NAME)
959 if (TREE_CODE (arg) == COND_EXPR)
960 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
961 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
962 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
963 else if (TREE_CODE (arg) == ADDR_EXPR
964 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
965 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
967 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
968 if (TREE_CODE (aop0) == INDIRECT_REF
969 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
970 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
971 length, visited, type);
974 if (type == 2)
976 val = arg;
977 if (TREE_CODE (val) != INTEGER_CST
978 || tree_int_cst_sgn (val) < 0)
979 return false;
981 else
982 val = c_strlen (arg, 1);
983 if (!val)
984 return false;
986 if (*length)
988 if (type > 0)
990 if (TREE_CODE (*length) != INTEGER_CST
991 || TREE_CODE (val) != INTEGER_CST)
992 return false;
994 if (tree_int_cst_lt (*length, val))
995 *length = val;
996 return true;
998 else if (simple_cst_equal (val, *length) != 1)
999 return false;
1002 *length = val;
1003 return true;
1006 /* If we were already here, break the infinite cycle. */
1007 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
1008 return true;
1009 bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
1011 var = arg;
1012 def_stmt = SSA_NAME_DEF_STMT (var);
1014 switch (gimple_code (def_stmt))
1016 case GIMPLE_ASSIGN:
1017 /* The RHS of the statement defining VAR must either have a
1018 constant length or come from another SSA_NAME with a constant
1019 length. */
1020 if (gimple_assign_single_p (def_stmt)
1021 || gimple_assign_unary_nop_p (def_stmt))
1023 tree rhs = gimple_assign_rhs1 (def_stmt);
1024 return get_maxval_strlen (rhs, length, visited, type);
1026 return false;
1028 case GIMPLE_PHI:
1030 /* All the arguments of the PHI node must have the same constant
1031 length. */
1032 unsigned i;
1034 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1036 tree arg = gimple_phi_arg (def_stmt, i)->def;
1038 /* If this PHI has itself as an argument, we cannot
1039 determine the string length of this argument. However,
1040 if we can find a constant string length for the other
1041 PHI args then we can still be sure that this is a
1042 constant string length. So be optimistic and just
1043 continue with the next argument. */
1044 if (arg == gimple_phi_result (def_stmt))
1045 continue;
1047 if (!get_maxval_strlen (arg, length, visited, type))
1048 return false;
1051 return true;
1053 default:
1054 return false;
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];   /* val[i] caches the string-length bound computed
			    for argument i; only val[arg_idx] is filled.  */
  tree callee, a;
  int arg_idx, type;     /* arg_idx: which argument to analyze; type: mode
			    flag handed to get_maxval_strlen.  */
  bitmap visited;        /* SSA versions already seen, to break cycles.  */
  bool ignore;           /* True when the call's result is unused.  */
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.
     This first switch only selects which argument carries the string
     (or size) whose maximal length we need, and the analysis mode.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  /* Second switch: perform the actual folding, now that the length
     bound (if any) is known.  Each case re-checks NARGS to guard
     against ill-formed calls.  */
  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	      fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (is_gimple_cast (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      /* The first switch already filtered to the codes above.  */
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
1251 /* Return the first of the base binfos of BINFO that has virtual functions. */
1253 static tree
1254 get_first_base_binfo_with_virtuals (tree binfo)
1256 int i;
1257 tree base_binfo;
1259 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1260 if (BINFO_VIRTUALS (base_binfo))
1261 return base_binfo;
1263 return NULL_TREE;
1267 /* Search for a base binfo of BINFO that corresponds to TYPE and return it if
1268 it is found or NULL_TREE if it is not. */
1270 static tree
1271 get_base_binfo_for_type (tree binfo, tree type)
1273 int i;
1274 tree base_binfo;
1275 tree res = NULL_TREE;
1277 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1278 if (TREE_TYPE (base_binfo) == type)
1280 gcc_assert (!res);
1281 res = base_binfo;
1284 return res;
/* Return a binfo describing the part of object referenced by expression REF.
   Return NULL_TREE if it cannot be determined.  REF can consist of a series of
   COMPONENT_REFs of a declaration or of an INDIRECT_REF or it can also be just
   a simple declaration, indirect reference or an SSA_NAME.  If the function
   discovers an INDIRECT_REF or an SSA_NAME, it will assume that the
   encapsulating type is described by KNOWN_BINFO, if it is not NULL_TREE.
   Otherwise the first non-artificial field declaration or the base declaration
   will be examined to get the encapsulating type.  */

tree
gimple_get_relevant_ref_binfo (tree ref, tree known_binfo)
{
  /* Iteratively peel COMPONENT_REFs from the outside in until the base
     object (a decl, SSA_NAME or MEM_REF) is reached.  */
  while (true)
    {
      if (TREE_CODE (ref) == COMPONENT_REF)
	{
	  tree par_type;
	  tree binfo, base_binfo;
	  tree field = TREE_OPERAND (ref, 1);

	  /* A non-artificial field is a real user-declared member; its own
	     type (if a record) describes the referenced part.  */
	  if (!DECL_ARTIFICIAL (field))
	    {
	      tree type = TREE_TYPE (field);
	      if (TREE_CODE (type) == RECORD_TYPE)
		return TYPE_BINFO (type);
	      else
		return NULL_TREE;
	    }

	  par_type = TREE_TYPE (TREE_OPERAND (ref, 0));
	  binfo = TYPE_BINFO (par_type);
	  if (!binfo
	      || BINFO_N_BASE_BINFOS (binfo) == 0)
	    return NULL_TREE;

	  /* An artificial field normally represents a base sub-object.  If
	     it is not the primary base (the first base with virtuals), look
	     up the matching base binfo in the descendant's binfo.  */
	  base_binfo = get_first_base_binfo_with_virtuals (binfo);
	  if (base_binfo && BINFO_TYPE (base_binfo) != TREE_TYPE (field))
	    {
	      tree d_binfo;

	      d_binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (ref, 0),
						       known_binfo);
	      /* Get descendant binfo. */
	      if (!d_binfo)
		return NULL_TREE;
	      return get_base_binfo_for_type (d_binfo, TREE_TYPE (field));
	    }

	  /* Primary base: its binfo is shared with the enclosing object,
	     so just continue with the inner reference.  */
	  ref = TREE_OPERAND (ref, 0);
	}
      else if (DECL_P (ref) && TREE_CODE (TREE_TYPE (ref)) == RECORD_TYPE)
	return TYPE_BINFO (TREE_TYPE (ref));
      else if (known_binfo
	       && (TREE_CODE (ref) == SSA_NAME
		   || TREE_CODE (ref) == MEM_REF))
	/* The dynamic type cannot be read off the expression; trust the
	   caller-provided hint.  */
	return known_binfo;
      else
	return NULL_TREE;
    }
}
1348 /* Fold a OBJ_TYPE_REF expression to the address of a function. TOKEN is
1349 integer form of OBJ_TYPE_REF_TOKEN of the reference expression. KNOWN_BINFO
1350 carries the binfo describing the true type of OBJ_TYPE_REF_OBJECT(REF). */
1352 tree
1353 gimple_fold_obj_type_ref_known_binfo (HOST_WIDE_INT token, tree known_binfo)
1355 HOST_WIDE_INT i;
1356 tree v, fndecl;
1357 struct cgraph_node *node;
1359 v = BINFO_VIRTUALS (known_binfo);
1360 i = 0;
1361 while (i != token)
1363 i += (TARGET_VTABLE_USES_DESCRIPTORS
1364 ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
1365 v = TREE_CHAIN (v);
1368 fndecl = TREE_VALUE (v);
1369 node = cgraph_get_node (fndecl);
1370 /* When cgraph node is missing and function is not public, we cannot
1371 devirtualize. This can happen in WHOPR when the actual method
1372 ends up in other partition, because we found devirtualization
1373 possibility too late. */
1374 if ((!node || (!node->analyzed && !node->in_other_partition))
1375 && (!TREE_PUBLIC (fndecl) || DECL_COMDAT (fndecl)))
1376 return NULL;
1377 return build_fold_addr_expr (fndecl);
1381 /* Fold a OBJ_TYPE_REF expression to the address of a function. If KNOWN_TYPE
1382 is not NULL_TREE, it is the true type of the outmost encapsulating object if
1383 that comes from a pointer SSA_NAME. If the true outmost encapsulating type
1384 can be determined from a declaration OBJ_TYPE_REF_OBJECT(REF), it is used
1385 regardless of KNOWN_TYPE (which thus can be NULL_TREE). */
1387 tree
1388 gimple_fold_obj_type_ref (tree ref, tree known_type)
1390 tree obj = OBJ_TYPE_REF_OBJECT (ref);
1391 tree known_binfo = known_type ? TYPE_BINFO (known_type) : NULL_TREE;
1392 tree binfo;
1394 if (TREE_CODE (obj) == ADDR_EXPR)
1395 obj = TREE_OPERAND (obj, 0);
1397 binfo = gimple_get_relevant_ref_binfo (obj, known_binfo);
1398 if (binfo)
1400 HOST_WIDE_INT token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
1401 return gimple_fold_obj_type_ref_known_binfo (token, binfo);
1403 else
1404 return NULL_TREE;
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
fold_gimple_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);

      if (result)
	{
	  /* If the folded result is not directly usable as a call
	     replacement, it must be re-gimplified into the IL.  */
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  return true;
	}
    }
  else
    {
      /* ??? Should perhaps do this in fold proper.  However, doing it
	 there requires that we create a new CALL_EXPR, and that requires
	 copying EH region info to the new node.  Easier to just do it
	 here where we can just smash the call operand.  */
      /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
      callee = gimple_call_fn (stmt);
      if (TREE_CODE (callee) == OBJ_TYPE_REF
	  && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR)
	{
	  tree t;

	  /* Try to devirtualize the virtual call into a direct one.  */
	  t = gimple_fold_obj_type_ref (callee, NULL_TREE);
	  if (t)
	    {
	      gimple_call_set_fn (stmt, t);
	      return true;
	    }
	}
    }

  return false;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases: when INPLACE is true the statement must not
   grow (no new operands) and must not be replaced by another one.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	tree lhs = gimple_assign_lhs (stmt);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In-place folding may only shrink the statement; a wider RHS
	   would require re-allocating the gimple statement.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      /* Fold *& in call arguments.  */
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
	  {
	    tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	    if (tmp)
	      {
		gimple_call_set_arg (stmt, i, tmp);
		changed = true;
	      }
	  }
      /* The entire statement may be replaced in this case.  */
      if (!inplace)
	changed |= fold_gimple_call (gsi);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  Outputs are lvalues (second argument
	 true to maybe_fold_reference), inputs are rvalues.  */
      for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	{
	  tree link = gimple_asm_output_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, true)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	{
	  tree link = gimple_asm_input_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, false)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  /* The statement may have been replaced above; re-fetch it before
     touching the LHS.  */
  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Non-inplace: the worker is allowed to replace the statement.  */
  return fold_stmt_1 (gsi, false);
}
1589 /* Perform the minimal folding on statement STMT. Only operations like
1590 *&x created by constant propagation are handled. The statement cannot
1591 be replaced with a new one. Return true if the statement was
1592 changed, false otherwise.
1593 The statement STMT should be in valid gimple form but may
1594 be in unfolded state as resulting from for example constant propagation
1595 which can produce *&x = 0. */
1597 bool
1598 fold_stmt_inplace (gimple stmt)
1600 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1601 bool changed = fold_stmt_1 (&gsi, true);
1602 gcc_assert (gsi_stmt (gsi) == stmt);
1603 return changed;
1606 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1607 if EXPR is null or we don't know how.
1608 If non-null, the result always has boolean type. */
1610 static tree
1611 canonicalize_bool (tree expr, bool invert)
1613 if (!expr)
1614 return NULL_TREE;
1615 else if (invert)
1617 if (integer_nonzerop (expr))
1618 return boolean_false_node;
1619 else if (integer_zerop (expr))
1620 return boolean_true_node;
1621 else if (TREE_CODE (expr) == SSA_NAME)
1622 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1623 build_int_cst (TREE_TYPE (expr), 0));
1624 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1625 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1626 boolean_type_node,
1627 TREE_OPERAND (expr, 0),
1628 TREE_OPERAND (expr, 1));
1629 else
1630 return NULL_TREE;
1632 else
1634 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1635 return expr;
1636 if (integer_nonzerop (expr))
1637 return boolean_true_node;
1638 else if (integer_zerop (expr))
1639 return boolean_false_node;
1640 else if (TREE_CODE (expr) == SSA_NAME)
1641 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1642 build_int_cst (TREE_TYPE (expr), 0));
1643 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1644 return fold_build2 (TREE_CODE (expr),
1645 boolean_type_node,
1646 TREE_OPERAND (expr, 0),
1647 TREE_OPERAND (expr, 1));
1648 else
1649 return NULL_TREE;
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* A boolean NAME is equivalent to (NAME != 0) and (NAME == 1).  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) or (name == 1): same polarity, recurse directly.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) or (name != 1): inverted polarity, recurse with
	     the inverted comparison code.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
1711 /* Check to see if two boolean expressions OP1 and OP2 are logically
1712 equivalent. */
1714 static bool
1715 same_bool_result_p (const_tree op1, const_tree op2)
1717 /* Simple cases first. */
1718 if (operand_equal_p (op1, op2, 0))
1719 return true;
1721 /* Check the cases where at least one of the operands is a comparison.
1722 These are a bit smarter than operand_equal_p in that they apply some
1723 identifies on SSA_NAMEs. */
1724 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1725 && same_bool_comparison_p (op1, TREE_CODE (op2),
1726 TREE_OPERAND (op2, 0),
1727 TREE_OPERAND (op2, 1)))
1728 return true;
1729 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1730 && same_bool_comparison_p (op2, TREE_CODE (op1),
1731 TREE_OPERAND (op1, 0),
1732 TREE_OPERAND (op1, 1)))
1733 return true;
1735 /* Default case. */
1736 return false;
/* Forward declarations for some mutually recursive functions that
   simplify ANDs and ORs of comparisons by recursing into the SSA
   definitions of their operands.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
1760 /* Helper function for and_comparisons_1: try to simplify the AND of the
1761 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1762 If INVERT is true, invert the value of the VAR before doing the AND.
1763 Return NULL_EXPR if we can't simplify this to a single expression. */
1765 static tree
1766 and_var_with_comparison (tree var, bool invert,
1767 enum tree_code code2, tree op2a, tree op2b)
1769 tree t;
1770 gimple stmt = SSA_NAME_DEF_STMT (var);
1772 /* We can only deal with variables whose definitions are assignments. */
1773 if (!is_gimple_assign (stmt))
1774 return NULL_TREE;
1776 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1777 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1778 Then we only have to consider the simpler non-inverted cases. */
1779 if (invert)
1780 t = or_var_with_comparison_1 (stmt,
1781 invert_tree_comparison (code2, false),
1782 op2a, op2b);
1783 else
1784 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1785 return canonicalize_bool (t, invert);
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;   /* Set when (op2a code2 op2b) tests
				       op2a for truth.  */
  tree false_test_var = NULL_TREE;  /* Set when it tests op2a for falsity.  */
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    /* var AND (var != 0) => var.  */
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    /* var AND (var == 0) => false.  */
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (innercode == TRUTH_AND_EXPR
      || innercode == TRUTH_OR_EXPR
      || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
	  && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b)) */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else
		/* Save partial result for later.  */
		partial = t;
	    }
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  CMP below is the three-way comparison of the
     two constant bounds.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  Here a simplification is
	 only possible when one comparison implies the other (VAL true),
	 so there is no "done" flag.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise chose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges: (x >= c AND x <= c) => x == c.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges. */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true for the (NAME == 0) / (NAME != 1) forms.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  /* This argument makes the AND false.  */
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME)
		    {
		      tree temp = and_var_with_comparison (arg, invert,
							   code2, op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  /* FALLTHRU -- non-boolean PHIs are not handled.  */

	default:
	  break;
	}
    }
  return NULL_TREE;
}
2190 /* Try to simplify the AND of two comparisons, specified by
2191 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2192 If this can be simplified to a single expression (without requiring
2193 introducing more SSA variables to hold intermediate values),
2194 return the resulting tree. Otherwise return NULL_TREE.
2195 If the result expression is non-null, it has boolean type. */
2197 tree
2198 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2199 enum tree_code code2, tree op2a, tree op2b)
2201 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2202 if (t)
2203 return t;
2204 else
2205 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2208 /* Helper function for or_comparisons_1: try to simplify the OR of the
2209 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2210 If INVERT is true, invert the value of VAR before doing the OR.
2211 Return NULL_EXPR if we can't simplify this to a single expression. */
2213 static tree
2214 or_var_with_comparison (tree var, bool invert,
2215 enum tree_code code2, tree op2a, tree op2b)
2217 tree t;
2218 gimple stmt = SSA_NAME_DEF_STMT (var);
2220 /* We can only deal with variables whose definitions are assignments. */
2221 if (!is_gimple_assign (stmt))
2222 return NULL_TREE;
2224 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2225 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2226 Then we only have to consider the simpler non-inverted cases. */
2227 if (invert)
2228 t = and_var_with_comparison_1 (stmt,
2229 invert_tree_comparison (code2, false),
2230 op2a, op2b);
2231 else
2232 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2233 return canonicalize_bool (t, invert);
2236 /* Try to simplify the OR of the ssa variable defined by the assignment
2237 STMT with the comparison specified by (OP2A CODE2 OP2B).
2238 Return NULL_EXPR if we can't simplify this to a single expression. */
2240 static tree
2241 or_var_with_comparison_1 (gimple stmt,
2242 enum tree_code code2, tree op2a, tree op2b)
2244 tree var = gimple_assign_lhs (stmt);
2245 tree true_test_var = NULL_TREE;
2246 tree false_test_var = NULL_TREE;
2247 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2249 /* Check for identities like (var OR (var != 0)) => true . */
2250 if (TREE_CODE (op2a) == SSA_NAME
2251 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2253 if ((code2 == NE_EXPR && integer_zerop (op2b))
2254 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2256 true_test_var = op2a;
2257 if (var == true_test_var)
2258 return var;
2260 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2261 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2263 false_test_var = op2a;
2264 if (var == false_test_var)
2265 return boolean_true_node;
2269 /* If the definition is a comparison, recurse on it. */
2270 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2272 tree t = or_comparisons_1 (innercode,
2273 gimple_assign_rhs1 (stmt),
2274 gimple_assign_rhs2 (stmt),
2275 code2,
2276 op2a,
2277 op2b);
2278 if (t)
2279 return t;
2282 /* If the definition is an AND or OR expression, we may be able to
2283 simplify by reassociating. */
2284 if (innercode == TRUTH_AND_EXPR
2285 || innercode == TRUTH_OR_EXPR
2286 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2287 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
2289 tree inner1 = gimple_assign_rhs1 (stmt);
2290 tree inner2 = gimple_assign_rhs2 (stmt);
2291 gimple s;
2292 tree t;
2293 tree partial = NULL_TREE;
2294 bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
2296 /* Check for boolean identities that don't require recursive examination
2297 of inner1/inner2:
2298 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2299 inner1 OR (inner1 AND inner2) => inner1
2300 !inner1 OR (inner1 OR inner2) => true
2301 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2303 if (inner1 == true_test_var)
2304 return (is_or ? var : inner1);
2305 else if (inner2 == true_test_var)
2306 return (is_or ? var : inner2);
2307 else if (inner1 == false_test_var)
2308 return (is_or
2309 ? boolean_true_node
2310 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2311 else if (inner2 == false_test_var)
2312 return (is_or
2313 ? boolean_true_node
2314 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2316 /* Next, redistribute/reassociate the OR across the inner tests.
2317 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2318 if (TREE_CODE (inner1) == SSA_NAME
2319 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2320 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2321 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2322 gimple_assign_rhs1 (s),
2323 gimple_assign_rhs2 (s),
2324 code2, op2a, op2b)))
2326 /* Handle the OR case, where we are reassociating:
2327 (inner1 OR inner2) OR (op2a code2 op2b)
2328 => (t OR inner2)
2329 If the partial result t is a constant, we win. Otherwise
2330 continue on to try reassociating with the other inner test. */
2331 if (innercode == TRUTH_OR_EXPR)
2333 if (integer_onep (t))
2334 return boolean_true_node;
2335 else if (integer_zerop (t))
2336 return inner2;
2339 /* Handle the AND case, where we are redistributing:
2340 (inner1 AND inner2) OR (op2a code2 op2b)
2341 => (t AND (inner2 OR (op2a code op2b))) */
2342 else
2344 if (integer_zerop (t))
2345 return boolean_false_node;
2346 else
2347 /* Save partial result for later. */
2348 partial = t;
2352 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2353 if (TREE_CODE (inner2) == SSA_NAME
2354 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2355 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2356 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2357 gimple_assign_rhs1 (s),
2358 gimple_assign_rhs2 (s),
2359 code2, op2a, op2b)))
2361 /* Handle the OR case, where we are reassociating:
2362 (inner1 OR inner2) OR (op2a code2 op2b)
2363 => (inner1 OR t) */
2364 if (innercode == TRUTH_OR_EXPR)
2366 if (integer_zerop (t))
2367 return inner1;
2368 else if (integer_onep (t))
2369 return boolean_true_node;
2372 /* Handle the AND case, where we are redistributing:
2373 (inner1 AND inner2) OR (op2a code2 op2b)
2374 => (t AND (inner1 OR (op2a code2 op2b)))
2375 => (t AND partial) */
2376 else
2378 if (integer_zerop (t))
2379 return boolean_false_node;
2380 else if (partial)
2382 /* We already got a simplification for the other
2383 operand to the redistributed AND expression. The
2384 interesting case is when at least one is true.
2385 Or, if both are the same, we can apply the identity
2386 (x AND x) == true. */
2387 if (integer_onep (partial))
2388 return t;
2389 else if (integer_onep (t))
2390 return partial;
2391 else if (same_bool_result_p (t, partial))
2392 return boolean_true_node;
2397 return NULL_TREE;
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* cmp orders the two constants: <0, 0, >0 as op1b is less than,
	 equal to, or greater than op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  /* VAL is whether op1b itself satisfies (x CODE2 op2b); if so
	     the union covers every value.  */
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  /* VAL is whether op2b itself satisfies (x CODE1 op1b).  */
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  /* (x == op1b) is subsumed by (x CODE2 op2b) when op1b
	     satisfies the latter.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges: (x < c) OR (x > c) => x != c.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.
	 (cmp == 0 with strict LT/GT was already handled above, so here
	 the union really is the whole range.)  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests that NAME is
	 false ((NAME == 0) or (NAME != 1)).  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  /* This argument makes the OR trivially true.  */
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME)
		    {
		      tree temp = or_var_with_comparison (arg, invert,
							  code2, op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  /* Non-boolean PHI: fall through to give up.  */

	default:
	  break;
	}
    }
  return NULL_TREE;
}
2638 /* Try to simplify the OR of two comparisons, specified by
2639 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2640 If this can be simplified to a single expression (without requiring
2641 introducing more SSA variables to hold intermediate values),
2642 return the resulting tree. Otherwise return NULL_TREE.
2643 If the result expression is non-null, it has boolean type. */
2645 tree
2646 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2647 enum tree_code code2, tree op2a, tree op2b)
2649 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2650 if (t)
2651 return t;
2652 else
2653 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);