* gcc.target/powerpc/altivec-volatile.c: Adjust expected warning.
[official-gcc.git] / gcc / gimple-fold.c
blob5bf82822a903f405abea2b47b1ec59cbe0d36823
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
32 #include "target.h"
35 /* If SYM is a constant variable with known value, return the value.
36 NULL_TREE is returned otherwise. */
38 tree
39 get_symbol_constant_value (tree sym)
41 if (TREE_STATIC (sym)
42 && (TREE_READONLY (sym)
43 || TREE_CODE (sym) == CONST_DECL))
45 tree val = DECL_INITIAL (sym);
46 if (val)
48 STRIP_NOPS (val);
49 if (is_gimple_min_invariant (val))
51 if (TREE_CODE (val) == ADDR_EXPR)
53 tree base = get_base_address (TREE_OPERAND (val, 0));
54 if (base && TREE_CODE (base) == VAR_DECL)
56 TREE_ADDRESSABLE (base) = 1;
57 if (gimple_referenced_vars (cfun))
58 add_referenced_var (base);
61 return val;
64 /* Variables declared 'const' without an initializer
65 have zero as the initializer if they may not be
66 overridden at link or run time. */
67 if (!val
68 && !DECL_EXTERNAL (sym)
69 && targetm.binds_local_p (sym)
70 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
71 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
72 return fold_convert (TREE_TYPE (sym), integer_zero_node);
75 return NULL_TREE;
79 /* Return true if we may propagate the address expression ADDR into the
80 dereference DEREF and cancel them. */
82 bool
83 may_propagate_address_into_dereference (tree addr, tree deref)
85 gcc_assert (TREE_CODE (deref) == MEM_REF
86 && TREE_CODE (addr) == ADDR_EXPR);
88 /* Don't propagate if ADDR's operand has incomplete type. */
89 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
90 return false;
92 /* If the address is invariant then we do not need to preserve restrict
93 qualifications. But we do need to preserve volatile qualifiers until
94 we can annotate the folded dereference itself properly. */
95 if (is_gimple_min_invariant (addr)
96 && (!TREE_THIS_VOLATILE (deref)
97 || TYPE_VOLATILE (TREE_TYPE (addr))))
98 return useless_type_conversion_p (TREE_TYPE (deref),
99 TREE_TYPE (TREE_OPERAND (addr, 0)));
101 /* Else both the address substitution and the folding must result in
102 a valid useless type conversion sequence. */
103 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
104 TREE_TYPE (addr))
105 && useless_type_conversion_p (TREE_TYPE (deref),
106 TREE_TYPE (TREE_OPERAND (addr, 0))));
110 /* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
111 BASE is an array type. OFFSET is a byte displacement.
113 LOC is the location of the original expression. */
115 static tree
116 maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
118 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
119 tree array_type, elt_type, elt_size;
120 tree domain_type;
122 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
123 measured in units of the size of elements type) from that ARRAY_REF).
124 We can't do anything if either is variable.
126 The case we handle here is *(&A[N]+O). */
127 if (TREE_CODE (base) == ARRAY_REF)
129 tree low_bound = array_ref_low_bound (base);
131 elt_offset = TREE_OPERAND (base, 1);
132 if (TREE_CODE (low_bound) != INTEGER_CST
133 || TREE_CODE (elt_offset) != INTEGER_CST)
134 return NULL_TREE;
136 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
137 base = TREE_OPERAND (base, 0);
140 /* Ignore stupid user tricks of indexing non-array variables. */
141 array_type = TREE_TYPE (base);
142 if (TREE_CODE (array_type) != ARRAY_TYPE)
143 return NULL_TREE;
144 elt_type = TREE_TYPE (array_type);
146 /* Use signed size type for intermediate computation on the index. */
147 idx_type = ssizetype;
149 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
150 element type (so we can use the alignment if it's not constant).
151 Otherwise, compute the offset as an index by using a division. If the
152 division isn't exact, then don't do anything. */
153 elt_size = TYPE_SIZE_UNIT (elt_type);
154 if (!elt_size)
155 return NULL;
156 if (integer_zerop (offset))
158 if (TREE_CODE (elt_size) != INTEGER_CST)
159 elt_size = size_int (TYPE_ALIGN (elt_type));
161 idx = build_int_cst (idx_type, 0);
163 else
165 unsigned HOST_WIDE_INT lquo, lrem;
166 HOST_WIDE_INT hquo, hrem;
167 double_int soffset;
169 /* The final array offset should be signed, so we need
170 to sign-extend the (possibly pointer) offset here
171 and use signed division. */
172 soffset = double_int_sext (tree_to_double_int (offset),
173 TYPE_PRECISION (TREE_TYPE (offset)));
174 if (TREE_CODE (elt_size) != INTEGER_CST
175 || div_and_round_double (TRUNC_DIV_EXPR, 0,
176 soffset.low, soffset.high,
177 TREE_INT_CST_LOW (elt_size),
178 TREE_INT_CST_HIGH (elt_size),
179 &lquo, &hquo, &lrem, &hrem)
180 || lrem || hrem)
181 return NULL_TREE;
183 idx = build_int_cst_wide (idx_type, lquo, hquo);
186 /* Assume the low bound is zero. If there is a domain type, get the
187 low bound, if any, convert the index into that type, and add the
188 low bound. */
189 min_idx = build_int_cst (idx_type, 0);
190 domain_type = TYPE_DOMAIN (array_type);
191 if (domain_type)
193 idx_type = domain_type;
194 if (TYPE_MIN_VALUE (idx_type))
195 min_idx = TYPE_MIN_VALUE (idx_type);
196 else
197 min_idx = fold_convert (idx_type, min_idx);
199 if (TREE_CODE (min_idx) != INTEGER_CST)
200 return NULL_TREE;
202 elt_offset = fold_convert (idx_type, elt_offset);
205 if (!integer_zerop (min_idx))
206 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
207 if (!integer_zerop (elt_offset))
208 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
210 /* Make sure to possibly truncate late after offsetting. */
211 idx = fold_convert (idx_type, idx);
213 /* We don't want to construct access past array bounds. For example
214 char *(c[4]);
215 c[3][2];
216 should not be simplified into (*c)[14] or tree-vrp will
217 give false warnings.
218 This is only an issue for multi-dimensional arrays. */
219 if (TREE_CODE (elt_type) == ARRAY_TYPE
220 && domain_type)
222 if (TYPE_MAX_VALUE (domain_type)
223 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
224 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
225 return NULL_TREE;
226 else if (TYPE_MIN_VALUE (domain_type)
227 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
228 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
229 return NULL_TREE;
230 else if (compare_tree_int (idx, 0) < 0)
231 return NULL_TREE;
235 tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
236 SET_EXPR_LOCATION (t, loc);
237 return t;
242 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
243 LOC is the location of original expression.
245 Before attempting the conversion strip off existing ADDR_EXPRs. */
247 tree
248 maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
249 tree orig_type)
251 tree ret;
253 STRIP_NOPS (base);
254 if (TREE_CODE (base) != ADDR_EXPR)
255 return NULL_TREE;
257 base = TREE_OPERAND (base, 0);
258 if (types_compatible_p (orig_type, TREE_TYPE (base))
259 && integer_zerop (offset))
260 return base;
262 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
263 if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
264 return ret;
265 return NULL_TREE;
268 /* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
269 LOC is the location of the original expression. */
271 tree
272 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
273 tree orig_type)
275 tree base, ret;
277 STRIP_NOPS (addr);
278 if (TREE_CODE (addr) != ADDR_EXPR)
279 return NULL_TREE;
280 base = TREE_OPERAND (addr, 0);
281 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
282 if (ret)
284 ret = build_fold_addr_expr (ret);
285 if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
286 return NULL_TREE;
287 SET_EXPR_LOCATION (ret, loc);
290 return ret;
294 /* A quaint feature extant in our address arithmetic is that there
295 can be hidden type changes here. The type of the result need
296 not be the same as the type of the input pointer.
298 What we're after here is an expression of the form
299 (T *)(&array + const)
300 where array is OP0, const is OP1, RES_TYPE is T and
301 the cast doesn't actually exist, but is implicit in the
302 type of the POINTER_PLUS_EXPR. We'd like to turn this into
303 &array[x]
304 which may be able to propagate further. */
306 tree
307 maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
309 tree ptd_type;
310 tree t;
312 /* The first operand should be an ADDR_EXPR. */
313 if (TREE_CODE (op0) != ADDR_EXPR)
314 return NULL_TREE;
315 op0 = TREE_OPERAND (op0, 0);
317 /* It had better be a constant. */
318 if (TREE_CODE (op1) != INTEGER_CST)
320 /* Or op0 should now be A[0] and the non-constant offset defined
321 via a multiplication by the array element size. */
322 if (TREE_CODE (op0) == ARRAY_REF
323 /* As we will end up creating a variable index array access
324 in the outermost array dimension make sure there isn't
325 a more inner array that the index could overflow to. */
326 && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
327 && integer_zerop (TREE_OPERAND (op0, 1))
328 && TREE_CODE (op1) == SSA_NAME)
330 gimple offset_def = SSA_NAME_DEF_STMT (op1);
331 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
332 if (!host_integerp (elsz, 1)
333 || !is_gimple_assign (offset_def))
334 return NULL_TREE;
336 /* Do not build array references of something that we can't
337 see the true number of array dimensions for. */
338 if (!DECL_P (TREE_OPERAND (op0, 0))
339 && !handled_component_p (TREE_OPERAND (op0, 0)))
340 return NULL_TREE;
342 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
343 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
344 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
345 return build_fold_addr_expr
346 (build4 (ARRAY_REF, TREE_TYPE (op0),
347 TREE_OPERAND (op0, 0),
348 gimple_assign_rhs1 (offset_def),
349 TREE_OPERAND (op0, 2),
350 TREE_OPERAND (op0, 3)));
351 else if (integer_onep (elsz)
352 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
353 return build_fold_addr_expr
354 (build4 (ARRAY_REF, TREE_TYPE (op0),
355 TREE_OPERAND (op0, 0),
356 op1,
357 TREE_OPERAND (op0, 2),
358 TREE_OPERAND (op0, 3)));
360 else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
361 /* Dto. */
362 && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
363 && TREE_CODE (op1) == SSA_NAME)
365 gimple offset_def = SSA_NAME_DEF_STMT (op1);
366 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
367 if (!host_integerp (elsz, 1)
368 || !is_gimple_assign (offset_def))
369 return NULL_TREE;
371 /* Do not build array references of something that we can't
372 see the true number of array dimensions for. */
373 if (!DECL_P (op0)
374 && !handled_component_p (op0))
375 return NULL_TREE;
377 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
378 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
379 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
380 return build_fold_addr_expr
381 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
382 op0, gimple_assign_rhs1 (offset_def),
383 integer_zero_node, NULL_TREE));
384 else if (integer_onep (elsz)
385 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
386 return build_fold_addr_expr
387 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
388 op0, op1,
389 integer_zero_node, NULL_TREE));
392 return NULL_TREE;
395 /* If the first operand is an ARRAY_REF, expand it so that we can fold
396 the offset into it. */
397 while (TREE_CODE (op0) == ARRAY_REF)
399 tree array_obj = TREE_OPERAND (op0, 0);
400 tree array_idx = TREE_OPERAND (op0, 1);
401 tree elt_type = TREE_TYPE (op0);
402 tree elt_size = TYPE_SIZE_UNIT (elt_type);
403 tree min_idx;
405 if (TREE_CODE (array_idx) != INTEGER_CST)
406 break;
407 if (TREE_CODE (elt_size) != INTEGER_CST)
408 break;
410 /* Un-bias the index by the min index of the array type. */
411 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
412 if (min_idx)
414 min_idx = TYPE_MIN_VALUE (min_idx);
415 if (min_idx)
417 if (TREE_CODE (min_idx) != INTEGER_CST)
418 break;
420 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
421 if (!integer_zerop (min_idx))
422 array_idx = int_const_binop (MINUS_EXPR, array_idx,
423 min_idx, 0);
427 /* Convert the index to a byte offset. */
428 array_idx = fold_convert (sizetype, array_idx);
429 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
431 /* Update the operands for the next round, or for folding. */
432 op1 = int_const_binop (PLUS_EXPR,
433 array_idx, op1, 0);
434 op0 = array_obj;
437 ptd_type = TREE_TYPE (res_type);
438 /* If we want a pointer to void, reconstruct the reference from the
439 array element type. A pointer to that can be trivially converted
440 to void *. This happens as we fold (void *)(ptr p+ off). */
441 if (VOID_TYPE_P (ptd_type)
442 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
443 ptd_type = TREE_TYPE (TREE_TYPE (op0));
445 /* At which point we can try some of the same things as for indirects. */
446 t = maybe_fold_offset_to_array_ref (loc, op0, op1);
447 if (t)
449 t = build_fold_addr_expr (t);
450 if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
451 return NULL_TREE;
452 SET_EXPR_LOCATION (t, loc);
455 return t;
458 /* Subroutine of fold_stmt. We perform several simplifications of the
459 memory reference tree EXPR and make sure to re-gimplify them properly
460 after propagation of constant addresses. IS_LHS is true if the
461 reference is supposed to be an lvalue. */
463 static tree
464 maybe_fold_reference (tree expr, bool is_lhs)
466 tree *t = &expr;
468 if (TREE_CODE (expr) == ARRAY_REF
469 && !is_lhs)
471 tree tem = fold_read_from_constant_string (expr);
472 if (tem)
473 return tem;
476 /* ??? We might want to open-code the relevant remaining cases
477 to avoid using the generic fold. */
478 if (handled_component_p (*t)
479 && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
481 tree tem = fold (*t);
482 if (tem != *t)
483 return tem;
486 while (handled_component_p (*t))
487 t = &TREE_OPERAND (*t, 0);
489 /* Fold back MEM_REFs to reference trees. */
490 if (TREE_CODE (*t) == MEM_REF
491 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
492 && integer_zerop (TREE_OPERAND (*t, 1))
493 && (TREE_THIS_VOLATILE (*t)
494 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
495 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
496 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
497 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
498 /* We have to look out here to not drop a required conversion
499 from the rhs to the lhs if is_lhs, but we don't have the
500 rhs here to verify that. Thus require strict type
501 compatibility. */
502 && types_compatible_p (TREE_TYPE (*t),
503 TREE_TYPE (TREE_OPERAND
504 (TREE_OPERAND (*t, 0), 0))))
506 tree tem;
507 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
508 tem = maybe_fold_reference (expr, is_lhs);
509 if (tem)
510 return tem;
511 return expr;
513 /* Canonicalize MEM_REFs invariant address operand. */
514 else if (TREE_CODE (*t) == MEM_REF
515 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
516 && !DECL_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))
517 && !CONSTANT_CLASS_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
519 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
520 TREE_OPERAND (*t, 0),
521 TREE_OPERAND (*t, 1));
522 if (tem)
524 *t = tem;
525 tem = maybe_fold_reference (expr, is_lhs);
526 if (tem)
527 return tem;
528 return expr;
531 else if (!is_lhs
532 && DECL_P (*t))
534 tree tem = get_symbol_constant_value (*t);
535 if (tem
536 && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
538 *t = unshare_expr (tem);
539 tem = maybe_fold_reference (expr, is_lhs);
540 if (tem)
541 return tem;
542 return expr;
546 return NULL_TREE;
550 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
551 replacement rhs for the statement or NULL_TREE if no simplification
552 could be made. It is assumed that the operands have been previously
553 folded. */
555 static tree
556 fold_gimple_assign (gimple_stmt_iterator *si)
558 gimple stmt = gsi_stmt (*si);
559 enum tree_code subcode = gimple_assign_rhs_code (stmt);
560 location_t loc = gimple_location (stmt);
562 tree result = NULL_TREE;
564 switch (get_gimple_rhs_class (subcode))
566 case GIMPLE_SINGLE_RHS:
568 tree rhs = gimple_assign_rhs1 (stmt);
570 /* Try to fold a conditional expression. */
571 if (TREE_CODE (rhs) == COND_EXPR)
573 tree op0 = COND_EXPR_COND (rhs);
574 tree tem;
575 bool set = false;
576 location_t cond_loc = EXPR_LOCATION (rhs);
578 if (COMPARISON_CLASS_P (op0))
580 fold_defer_overflow_warnings ();
581 tem = fold_binary_loc (cond_loc,
582 TREE_CODE (op0), TREE_TYPE (op0),
583 TREE_OPERAND (op0, 0),
584 TREE_OPERAND (op0, 1));
585 /* This is actually a conditional expression, not a GIMPLE
586 conditional statement, however, the valid_gimple_rhs_p
587 test still applies. */
588 set = (tem && is_gimple_condexpr (tem)
589 && valid_gimple_rhs_p (tem));
590 fold_undefer_overflow_warnings (set, stmt, 0);
592 else if (is_gimple_min_invariant (op0))
594 tem = op0;
595 set = true;
597 else
598 return NULL_TREE;
600 if (set)
601 result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
602 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
605 else if (TREE_CODE (rhs) == TARGET_MEM_REF)
606 return maybe_fold_tmr (rhs);
608 else if (REFERENCE_CLASS_P (rhs))
609 return maybe_fold_reference (rhs, false);
611 else if (TREE_CODE (rhs) == ADDR_EXPR)
613 tree ref = TREE_OPERAND (rhs, 0);
614 tree tem = maybe_fold_reference (ref, true);
615 if (tem
616 && TREE_CODE (tem) == MEM_REF
617 && integer_zerop (TREE_OPERAND (tem, 1)))
618 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
619 else if (tem)
620 result = fold_convert (TREE_TYPE (rhs),
621 build_fold_addr_expr_loc (loc, tem));
622 else if (TREE_CODE (ref) == MEM_REF
623 && integer_zerop (TREE_OPERAND (ref, 1)))
624 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
627 else if (TREE_CODE (rhs) == CONSTRUCTOR
628 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
629 && (CONSTRUCTOR_NELTS (rhs)
630 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
632 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
633 unsigned i;
634 tree val;
636 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
637 if (TREE_CODE (val) != INTEGER_CST
638 && TREE_CODE (val) != REAL_CST
639 && TREE_CODE (val) != FIXED_CST)
640 return NULL_TREE;
642 return build_vector_from_ctor (TREE_TYPE (rhs),
643 CONSTRUCTOR_ELTS (rhs));
646 else if (DECL_P (rhs))
647 return unshare_expr (get_symbol_constant_value (rhs));
649 /* If we couldn't fold the RHS, hand over to the generic
650 fold routines. */
651 if (result == NULL_TREE)
652 result = fold (rhs);
654 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
655 that may have been added by fold, and "useless" type
656 conversions that might now be apparent due to propagation. */
657 STRIP_USELESS_TYPE_CONVERSION (result);
659 if (result != rhs && valid_gimple_rhs_p (result))
660 return result;
662 return NULL_TREE;
664 break;
666 case GIMPLE_UNARY_RHS:
668 tree rhs = gimple_assign_rhs1 (stmt);
670 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
671 if (result)
673 /* If the operation was a conversion do _not_ mark a
674 resulting constant with TREE_OVERFLOW if the original
675 constant was not. These conversions have implementation
676 defined behavior and retaining the TREE_OVERFLOW flag
677 here would confuse later passes such as VRP. */
678 if (CONVERT_EXPR_CODE_P (subcode)
679 && TREE_CODE (result) == INTEGER_CST
680 && TREE_CODE (rhs) == INTEGER_CST)
681 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
683 STRIP_USELESS_TYPE_CONVERSION (result);
684 if (valid_gimple_rhs_p (result))
685 return result;
687 else if (CONVERT_EXPR_CODE_P (subcode)
688 && POINTER_TYPE_P (gimple_expr_type (stmt))
689 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
691 tree type = gimple_expr_type (stmt);
692 tree t = maybe_fold_offset_to_address (loc,
693 gimple_assign_rhs1 (stmt),
694 integer_zero_node, type);
695 if (t)
696 return t;
699 break;
701 case GIMPLE_BINARY_RHS:
702 /* Try to fold pointer addition. */
703 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
705 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
706 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
708 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
709 if (!useless_type_conversion_p
710 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
711 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
713 result = maybe_fold_stmt_addition (gimple_location (stmt),
714 type,
715 gimple_assign_rhs1 (stmt),
716 gimple_assign_rhs2 (stmt));
719 if (!result)
720 result = fold_binary_loc (loc, subcode,
721 TREE_TYPE (gimple_assign_lhs (stmt)),
722 gimple_assign_rhs1 (stmt),
723 gimple_assign_rhs2 (stmt));
725 if (result)
727 STRIP_USELESS_TYPE_CONVERSION (result);
728 if (valid_gimple_rhs_p (result))
729 return result;
731 /* Fold might have produced non-GIMPLE, so if we trust it blindly
732 we lose canonicalization opportunities. Do not go again
733 through fold here though, or the same non-GIMPLE will be
734 produced. */
735 if (commutative_tree_code (subcode)
736 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
737 gimple_assign_rhs2 (stmt), false))
738 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
739 gimple_assign_rhs2 (stmt),
740 gimple_assign_rhs1 (stmt));
742 break;
744 case GIMPLE_TERNARY_RHS:
745 result = fold_ternary_loc (loc, subcode,
746 TREE_TYPE (gimple_assign_lhs (stmt)),
747 gimple_assign_rhs1 (stmt),
748 gimple_assign_rhs2 (stmt),
749 gimple_assign_rhs3 (stmt));
751 if (result)
753 STRIP_USELESS_TYPE_CONVERSION (result);
754 if (valid_gimple_rhs_p (result))
755 return result;
757 /* Fold might have produced non-GIMPLE, so if we trust it blindly
758 we lose canonicalization opportunities. Do not go again
759 through fold here though, or the same non-GIMPLE will be
760 produced. */
761 if (commutative_ternary_tree_code (subcode)
762 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
763 gimple_assign_rhs2 (stmt), false))
764 return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
765 gimple_assign_rhs2 (stmt),
766 gimple_assign_rhs1 (stmt),
767 gimple_assign_rhs3 (stmt));
769 break;
771 case GIMPLE_INVALID_RHS:
772 gcc_unreachable ();
775 return NULL_TREE;
778 /* Attempt to fold a conditional statement. Return true if any changes were
779 made. We only attempt to fold the condition expression, and do not perform
780 any transformation that would require alteration of the cfg. It is
781 assumed that the operands have been previously folded. */
783 static bool
784 fold_gimple_cond (gimple stmt)
786 tree result = fold_binary_loc (gimple_location (stmt),
787 gimple_cond_code (stmt),
788 boolean_type_node,
789 gimple_cond_lhs (stmt),
790 gimple_cond_rhs (stmt));
792 if (result)
794 STRIP_USELESS_TYPE_CONVERSION (result);
795 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
797 gimple_cond_set_condition_from_tree (stmt, result);
798 return true;
802 return false;
805 /* Convert EXPR into a GIMPLE value suitable for substitution on the
806 RHS of an assignment. Insert the necessary statements before
807 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
808 is replaced. If the call is expected to produces a result, then it
809 is replaced by an assignment of the new RHS to the result variable.
810 If the result is to be ignored, then the call is replaced by a
811 GIMPLE_NOP. A proper VDEF chain is retained by making the first
812 VUSE and the last VDEF of the whole sequence be the same as the replaced
813 statement and using new SSA names for stores in between. */
815 void
816 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
818 tree lhs;
819 tree tmp = NULL_TREE; /* Silence warning. */
820 gimple stmt, new_stmt;
821 gimple_stmt_iterator i;
822 gimple_seq stmts = gimple_seq_alloc();
823 struct gimplify_ctx gctx;
824 gimple last = NULL;
825 gimple laststore = NULL;
826 tree reaching_vuse;
828 stmt = gsi_stmt (*si_p);
830 gcc_assert (is_gimple_call (stmt));
832 lhs = gimple_call_lhs (stmt);
833 reaching_vuse = gimple_vuse (stmt);
835 push_gimplify_context (&gctx);
837 if (lhs == NULL_TREE)
838 gimplify_and_add (expr, &stmts);
839 else
840 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
842 pop_gimplify_context (NULL);
844 if (gimple_has_location (stmt))
845 annotate_all_with_location (stmts, gimple_location (stmt));
847 /* The replacement can expose previously unreferenced variables. */
848 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
850 if (last)
852 gsi_insert_before (si_p, last, GSI_NEW_STMT);
853 gsi_next (si_p);
855 new_stmt = gsi_stmt (i);
856 find_new_referenced_vars (new_stmt);
857 mark_symbols_for_renaming (new_stmt);
858 /* If the new statement has a VUSE, update it with exact SSA name we
859 know will reach this one. */
860 if (gimple_vuse (new_stmt))
862 /* If we've also seen a previous store create a new VDEF for
863 the latter one, and make that the new reaching VUSE. */
864 if (laststore)
866 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
867 gimple_set_vdef (laststore, reaching_vuse);
868 update_stmt (laststore);
869 laststore = NULL;
871 gimple_set_vuse (new_stmt, reaching_vuse);
872 gimple_set_modified (new_stmt, true);
874 if (gimple_assign_single_p (new_stmt)
875 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
877 laststore = new_stmt;
879 last = new_stmt;
882 if (lhs == NULL_TREE)
884 /* If we replace a call without LHS that has a VDEF and our new
885 sequence ends with a store we must make that store have the same
886 vdef in order not to break the sequencing. This can happen
887 for instance when folding memcpy calls into assignments. */
888 if (gimple_vdef (stmt) && laststore)
890 gimple_set_vdef (laststore, gimple_vdef (stmt));
891 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
892 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
893 update_stmt (laststore);
895 else
897 unlink_stmt_vdef (stmt);
898 release_defs (stmt);
900 new_stmt = last;
902 else
904 if (last)
906 gsi_insert_before (si_p, last, GSI_NEW_STMT);
907 gsi_next (si_p);
909 if (laststore && is_gimple_reg (lhs))
911 gimple_set_vdef (laststore, gimple_vdef (stmt));
912 update_stmt (laststore);
913 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
914 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
915 laststore = NULL;
917 else if (laststore)
919 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
920 gimple_set_vdef (laststore, reaching_vuse);
921 update_stmt (laststore);
922 laststore = NULL;
924 new_stmt = gimple_build_assign (lhs, tmp);
925 if (!is_gimple_reg (tmp))
926 gimple_set_vuse (new_stmt, reaching_vuse);
927 if (!is_gimple_reg (lhs))
929 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
930 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
931 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
935 gimple_set_location (new_stmt, gimple_location (stmt));
936 gsi_replace (si_p, new_stmt, false);
939 /* Return the string length, maximum string length or maximum value of
940 ARG in LENGTH.
941 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
942 is not NULL and, for TYPE == 0, its value is not equal to the length
943 we determine or if we are unable to determine the length or value,
944 return false. VISITED is a bitmap of visited variables.
945 TYPE is 0 if string length should be returned, 1 for maximum string
946 length and 2 for maximum value ARG can have. */
948 static bool
949 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
951 tree var, val;
952 gimple def_stmt;
954 if (TREE_CODE (arg) != SSA_NAME)
956 if (TREE_CODE (arg) == COND_EXPR)
957 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
958 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
959 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
960 else if (TREE_CODE (arg) == ADDR_EXPR
961 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
962 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
964 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
965 if (TREE_CODE (aop0) == INDIRECT_REF
966 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
967 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
968 length, visited, type);
971 if (type == 2)
973 val = arg;
974 if (TREE_CODE (val) != INTEGER_CST
975 || tree_int_cst_sgn (val) < 0)
976 return false;
978 else
979 val = c_strlen (arg, 1);
980 if (!val)
981 return false;
983 if (*length)
985 if (type > 0)
987 if (TREE_CODE (*length) != INTEGER_CST
988 || TREE_CODE (val) != INTEGER_CST)
989 return false;
991 if (tree_int_cst_lt (*length, val))
992 *length = val;
993 return true;
995 else if (simple_cst_equal (val, *length) != 1)
996 return false;
999 *length = val;
1000 return true;
1003 /* If we were already here, break the infinite cycle. */
1004 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
1005 return true;
1006 bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
1008 var = arg;
1009 def_stmt = SSA_NAME_DEF_STMT (var);
1011 switch (gimple_code (def_stmt))
1013 case GIMPLE_ASSIGN:
1014 /* The RHS of the statement defining VAR must either have a
1015 constant length or come from another SSA_NAME with a constant
1016 length. */
1017 if (gimple_assign_single_p (def_stmt)
1018 || gimple_assign_unary_nop_p (def_stmt))
1020 tree rhs = gimple_assign_rhs1 (def_stmt);
1021 return get_maxval_strlen (rhs, length, visited, type);
1023 return false;
1025 case GIMPLE_PHI:
1027 /* All the arguments of the PHI node must have the same constant
1028 length. */
1029 unsigned i;
1031 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1033 tree arg = gimple_phi_arg (def_stmt, i)->def;
1035 /* If this PHI has itself as an argument, we cannot
1036 determine the string length of this argument. However,
1037 if we can find a constant string length for the other
1038 PHI args then we can still be sure that this is a
1039 constant string length. So be optimistic and just
1040 continue with the next argument. */
1041 if (arg == gimple_phi_result (def_stmt))
1042 continue;
1044 if (!get_maxval_strlen (arg, length, visited, type))
1045 return false;
1048 return true;
1050 default:
1051 return false;
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];		/* val[arg_idx] receives the string length
				   discovered for the interesting argument.  */
  tree callee, a;
  int arg_idx, type;		/* arg_idx: which call argument carries the
				   string; type: mode flag forwarded to
				   get_maxval_strlen (0/1/2 depending on the
				   builtin class; see that function).  */
  bitmap visited;
  bool ignore;			/* True if the call's value is unused.  */
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  /* Guard against malformed calls with fewer arguments than expected.  */
  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	      fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (is_gimple_cast (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      /* The first switch above admits only the codes handled here.  */
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
1248 /* Return the first of the base binfos of BINFO that has virtual functions. */
1250 static tree
1251 get_first_base_binfo_with_virtuals (tree binfo)
1253 int i;
1254 tree base_binfo;
1256 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1257 if (BINFO_VIRTUALS (base_binfo))
1258 return base_binfo;
1260 return NULL_TREE;
1264 /* Search for a base binfo of BINFO that corresponds to TYPE and return it if
1265 it is found or NULL_TREE if it is not. */
1267 static tree
1268 get_base_binfo_for_type (tree binfo, tree type)
1270 int i;
1271 tree base_binfo;
1272 tree res = NULL_TREE;
1274 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1275 if (TREE_TYPE (base_binfo) == type)
1277 gcc_assert (!res);
1278 res = base_binfo;
1281 return res;
/* Return a binfo describing the part of object referenced by expression REF.
   Return NULL_TREE if it cannot be determined.  REF can consist of a series of
   COMPONENT_REFs of a declaration or of an INDIRECT_REF or it can also be just
   a simple declaration, indirect reference or an SSA_NAME.  If the function
   discovers an INDIRECT_REF or an SSA_NAME, it will assume that the
   encapsulating type is described by KNOWN_BINFO, if it is not NULL_TREE.
   Otherwise the first non-artificial field declaration or the base declaration
   will be examined to get the encapsulating type.  */

tree
gimple_get_relevant_ref_binfo (tree ref, tree known_binfo)
{
  /* Peel COMPONENT_REFs one level per iteration until we can decide.  */
  while (true)
    {
      if (TREE_CODE (ref) == COMPONENT_REF)
	{
	  tree par_type;
	  tree binfo, base_binfo;
	  tree field = TREE_OPERAND (ref, 1);

	  /* A non-artificial field is a real user field: its own record
	     type (if any) is the encapsulating type we are after.  */
	  if (!DECL_ARTIFICIAL (field))
	    {
	      tree type = TREE_TYPE (field);
	      if (TREE_CODE (type) == RECORD_TYPE)
		return TYPE_BINFO (type);
	      else
		return NULL_TREE;
	    }

	  /* Artificial field: this is a base-class subobject access.  */
	  par_type = TREE_TYPE (TREE_OPERAND (ref, 0));
	  binfo = TYPE_BINFO (par_type);
	  if (!binfo
	      || BINFO_N_BASE_BINFOS (binfo) == 0)
	    return NULL_TREE;

	  base_binfo = get_first_base_binfo_with_virtuals (binfo);
	  if (base_binfo && BINFO_TYPE (base_binfo) != TREE_TYPE (field))
	    {
	      tree d_binfo;

	      /* The field is not the primary base: resolve the outer
		 object first, then descend to the base for FIELD's type.  */
	      d_binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (ref, 0),
						       known_binfo);
	      /* Get descendant binfo. */
	      if (!d_binfo)
		return NULL_TREE;
	      return get_base_binfo_for_type (d_binfo, TREE_TYPE (field));
	    }

	  ref = TREE_OPERAND (ref, 0);
	}
      else if (DECL_P (ref) && TREE_CODE (TREE_TYPE (ref)) == RECORD_TYPE)
	return TYPE_BINFO (TREE_TYPE (ref));
      else if (known_binfo
	       && (TREE_CODE (ref) == SSA_NAME
		   || TREE_CODE (ref) == MEM_REF))
	/* For pointers we cannot see through, trust the caller's binfo.  */
	return known_binfo;
      else
	return NULL_TREE;
    }
}
/* Fold a OBJ_TYPE_REF expression to the address of a function.  TOKEN is
   integer form of OBJ_TYPE_REF_TOKEN of the reference expression.  KNOWN_BINFO
   carries the binfo describing the true type of OBJ_TYPE_REF_OBJECT(REF).  */

tree
gimple_fold_obj_type_ref_known_binfo (HOST_WIDE_INT token, tree known_binfo)
{
  HOST_WIDE_INT i;
  tree v, fndecl;
  struct cgraph_node *node;

  /* Walk the virtual-function list until the slot TOKEN is reached.
     Each slot occupies TARGET_VTABLE_USES_DESCRIPTORS entries when the
     target uses function descriptors, otherwise one entry.  */
  v = BINFO_VIRTUALS (known_binfo);
  i = 0;
  while (i != token)
    {
      i += (TARGET_VTABLE_USES_DESCRIPTORS
	    ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
      v = TREE_CHAIN (v);
    }

  fndecl = TREE_VALUE (v);
  node = cgraph_get_node (fndecl);
  /* When cgraph node is missing and function is not public, we cannot
     devirtualize.  This can happen in WHOPR when the actual method
     ends up in other partition, because we found devirtualization
     possibility too late.  */
  if ((!node || (!node->analyzed && !node->in_other_partition))
      && (!TREE_PUBLIC (fndecl) || DECL_COMDAT (fndecl)))
    return NULL;
  return build_fold_addr_expr (fndecl);
}
1378 /* Fold a OBJ_TYPE_REF expression to the address of a function. If KNOWN_TYPE
1379 is not NULL_TREE, it is the true type of the outmost encapsulating object if
1380 that comes from a pointer SSA_NAME. If the true outmost encapsulating type
1381 can be determined from a declaration OBJ_TYPE_REF_OBJECT(REF), it is used
1382 regardless of KNOWN_TYPE (which thus can be NULL_TREE). */
1384 tree
1385 gimple_fold_obj_type_ref (tree ref, tree known_type)
1387 tree obj = OBJ_TYPE_REF_OBJECT (ref);
1388 tree known_binfo = known_type ? TYPE_BINFO (known_type) : NULL_TREE;
1389 tree binfo;
1391 if (TREE_CODE (obj) == ADDR_EXPR)
1392 obj = TREE_OPERAND (obj, 0);
1394 binfo = gimple_get_relevant_ref_binfo (obj, known_binfo);
1395 if (binfo)
1397 HOST_WIDE_INT token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
1398 return gimple_fold_obj_type_ref_known_binfo (token, binfo);
1400 else
1401 return NULL_TREE;
1404 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1405 The statement may be replaced by another statement, e.g., if the call
1406 simplifies to a constant value. Return true if any changes were made.
1407 It is assumed that the operands have been previously folded. */
1409 static bool
1410 fold_gimple_call (gimple_stmt_iterator *gsi)
1412 gimple stmt = gsi_stmt (*gsi);
1414 tree callee = gimple_call_fndecl (stmt);
1416 /* Check for builtins that CCP can handle using information not
1417 available in the generic fold routines. */
1418 if (callee && DECL_BUILT_IN (callee))
1420 tree result = gimple_fold_builtin (stmt);
1422 if (result)
1424 if (!update_call_from_tree (gsi, result))
1425 gimplify_and_update_call_from_tree (gsi, result);
1426 return true;
1429 else
1431 /* ??? Should perhaps do this in fold proper. However, doing it
1432 there requires that we create a new CALL_EXPR, and that requires
1433 copying EH region info to the new node. Easier to just do it
1434 here where we can just smash the call operand. */
1435 /* ??? Is there a good reason not to do this in fold_stmt_inplace? */
1436 callee = gimple_call_fn (stmt);
1437 if (TREE_CODE (callee) == OBJ_TYPE_REF
1438 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR)
1440 tree t;
1442 t = gimple_fold_obj_type_ref (callee, NULL_TREE);
1443 if (t)
1445 gimple_call_set_fn (stmt, t);
1446 return true;
1451 return false;
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	tree lhs = gimple_assign_lhs (stmt);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In-place folding may only use a RHS that needs no more
	   operand slots than the statement already has.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      /* Fold *& in call arguments.  */
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
	  {
	    tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	    if (tmp)
	      {
		gimple_call_set_arg (stmt, i, tmp);
		changed = true;
	      }
	  }
      /* The entire statement may be replaced in this case.  */
      if (!inplace)
	changed |= fold_gimple_call (gsi);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	{
	  tree link = gimple_asm_output_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, true)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	{
	  tree link = gimple_asm_input_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, false)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      break;

    default:;
    }

  /* Re-fetch the statement: the folding above (e.g. fold_gimple_call)
     may have replaced it with a new one.  */
  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
1556 /* Fold the statement pointed to by GSI. In some cases, this function may
1557 replace the whole statement with a new one. Returns true iff folding
1558 makes any changes.
1559 The statement pointed to by GSI should be in valid gimple form but may
1560 be in unfolded state as resulting from for example constant propagation
1561 which can produce *&x = 0. */
1563 bool
1564 fold_stmt (gimple_stmt_iterator *gsi)
1566 return fold_stmt_1 (gsi, false);
1569 /* Perform the minimal folding on statement STMT. Only operations like
1570 *&x created by constant propagation are handled. The statement cannot
1571 be replaced with a new one. Return true if the statement was
1572 changed, false otherwise.
1573 The statement STMT should be in valid gimple form but may
1574 be in unfolded state as resulting from for example constant propagation
1575 which can produce *&x = 0. */
1577 bool
1578 fold_stmt_inplace (gimple stmt)
1580 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1581 bool changed = fold_stmt_1 (&gsi, true);
1582 gcc_assert (gsi_stmt (gsi) == stmt);
1583 return changed;
1586 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1587 if EXPR is null or we don't know how.
1588 If non-null, the result always has boolean type. */
1590 static tree
1591 canonicalize_bool (tree expr, bool invert)
1593 if (!expr)
1594 return NULL_TREE;
1595 else if (invert)
1597 if (integer_nonzerop (expr))
1598 return boolean_false_node;
1599 else if (integer_zerop (expr))
1600 return boolean_true_node;
1601 else if (TREE_CODE (expr) == SSA_NAME)
1602 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1603 build_int_cst (TREE_TYPE (expr), 0));
1604 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1605 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1606 boolean_type_node,
1607 TREE_OPERAND (expr, 0),
1608 TREE_OPERAND (expr, 1));
1609 else
1610 return NULL_TREE;
1612 else
1614 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1615 return expr;
1616 if (integer_nonzerop (expr))
1617 return boolean_true_node;
1618 else if (integer_zerop (expr))
1619 return boolean_false_node;
1620 else if (TREE_CODE (expr) == SSA_NAME)
1621 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1622 build_int_cst (TREE_TYPE (expr), 0));
1623 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1624 return fold_build2 (TREE_CODE (expr),
1625 boolean_type_node,
1626 TREE_OPERAND (expr, 0),
1627 TREE_OPERAND (expr, 1));
1628 else
1629 return NULL_TREE;
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	/* A boolean name equals (name != 0) and (name == 1).  */
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (def != 0) or (def == 1): compare EXPR against DEF directly.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (def == 0) or (def != 1): compare against the inverted DEF.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
1691 /* Check to see if two boolean expressions OP1 and OP2 are logically
1692 equivalent. */
1694 static bool
1695 same_bool_result_p (const_tree op1, const_tree op2)
1697 /* Simple cases first. */
1698 if (operand_equal_p (op1, op2, 0))
1699 return true;
1701 /* Check the cases where at least one of the operands is a comparison.
1702 These are a bit smarter than operand_equal_p in that they apply some
1703 identifies on SSA_NAMEs. */
1704 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1705 && same_bool_comparison_p (op1, TREE_CODE (op2),
1706 TREE_OPERAND (op2, 0),
1707 TREE_OPERAND (op2, 1)))
1708 return true;
1709 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1710 && same_bool_comparison_p (op2, TREE_CODE (op1),
1711 TREE_OPERAND (op1, 0),
1712 TREE_OPERAND (op1, 1)))
1713 return true;
1715 /* Default case. */
1716 return false;
/* Forward declarations for some mutually recursive functions.
   The and_* and or_* families recurse into each other via DeMorgan
   rewrites, so all six prototypes are needed up front.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
1740 /* Helper function for and_comparisons_1: try to simplify the AND of the
1741 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1742 If INVERT is true, invert the value of the VAR before doing the AND.
1743 Return NULL_EXPR if we can't simplify this to a single expression. */
1745 static tree
1746 and_var_with_comparison (tree var, bool invert,
1747 enum tree_code code2, tree op2a, tree op2b)
1749 tree t;
1750 gimple stmt = SSA_NAME_DEF_STMT (var);
1752 /* We can only deal with variables whose definitions are assignments. */
1753 if (!is_gimple_assign (stmt))
1754 return NULL_TREE;
1756 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1757 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1758 Then we only have to consider the simpler non-inverted cases. */
1759 if (invert)
1760 t = or_var_with_comparison_1 (stmt,
1761 invert_tree_comparison (code2, false),
1762 op2a, op2b);
1763 else
1764 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1765 return canonicalize_bool (t, invert);
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;	/* op2a when (op2a code2 op2b) == op2a.  */
  tree false_test_var = NULL_TREE;	/* op2a when (op2a code2 op2b) == !op2a.  */
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (innercode == TRUTH_AND_EXPR
      || innercode == TRUTH_OR_EXPR
      || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
	  && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else
		/* Save partial result for later.  */
		partial = t;
	    }
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* cmp < 0, == 0, > 0 iff op1b < op2b, == op2b, > op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  /* VAL true means the second comparison implies the first, so
	     the AND collapses to the second comparison alone.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise chose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges: (x >= c AND x <= c) => x == c.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges. */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true for (NAME == 0) and (NAME != 1), i.e. when the
	 first comparison tests NAME's negation.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME)
		    {
		      tree temp = and_var_with_comparison (arg, invert,
							   code2, op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  /* Non-boolean PHI: fall through to give up.  */

	default:
	  break;
	}
    }
  return NULL_TREE;
}
2170 /* Try to simplify the AND of two comparisons, specified by
2171 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2172 If this can be simplified to a single expression (without requiring
2173 introducing more SSA variables to hold intermediate values),
2174 return the resulting tree. Otherwise return NULL_TREE.
2175 If the result expression is non-null, it has boolean type. */
2177 tree
2178 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2179 enum tree_code code2, tree op2a, tree op2b)
2181 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2182 if (t)
2183 return t;
2184 else
2185 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2188 /* Helper function for or_comparisons_1: try to simplify the OR of the
2189 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2190 If INVERT is true, invert the value of VAR before doing the OR.
2191 Return NULL_EXPR if we can't simplify this to a single expression. */
2193 static tree
2194 or_var_with_comparison (tree var, bool invert,
2195 enum tree_code code2, tree op2a, tree op2b)
2197 tree t;
2198 gimple stmt = SSA_NAME_DEF_STMT (var);
2200 /* We can only deal with variables whose definitions are assignments. */
2201 if (!is_gimple_assign (stmt))
2202 return NULL_TREE;
2204 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2205 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2206 Then we only have to consider the simpler non-inverted cases. */
2207 if (invert)
2208 t = and_var_with_comparison_1 (stmt,
2209 invert_tree_comparison (code2, false),
2210 op2a, op2b);
2211 else
2212 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2213 return canonicalize_bool (t, invert);
2216 /* Try to simplify the OR of the ssa variable defined by the assignment
2217 STMT with the comparison specified by (OP2A CODE2 OP2B).
2218 Return NULL_EXPR if we can't simplify this to a single expression. */
2220 static tree
2221 or_var_with_comparison_1 (gimple stmt,
2222 enum tree_code code2, tree op2a, tree op2b)
2224 tree var = gimple_assign_lhs (stmt);
2225 tree true_test_var = NULL_TREE;
2226 tree false_test_var = NULL_TREE;
2227 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2229 /* Check for identities like (var OR (var != 0)) => true . */
2230 if (TREE_CODE (op2a) == SSA_NAME
2231 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2233 if ((code2 == NE_EXPR && integer_zerop (op2b))
2234 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2236 true_test_var = op2a;
2237 if (var == true_test_var)
2238 return var;
2240 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2241 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2243 false_test_var = op2a;
2244 if (var == false_test_var)
2245 return boolean_true_node;
2249 /* If the definition is a comparison, recurse on it. */
2250 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2252 tree t = or_comparisons_1 (innercode,
2253 gimple_assign_rhs1 (stmt),
2254 gimple_assign_rhs2 (stmt),
2255 code2,
2256 op2a,
2257 op2b);
2258 if (t)
2259 return t;
2262 /* If the definition is an AND or OR expression, we may be able to
2263 simplify by reassociating. */
2264 if (innercode == TRUTH_AND_EXPR
2265 || innercode == TRUTH_OR_EXPR
2266 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2267 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
2269 tree inner1 = gimple_assign_rhs1 (stmt);
2270 tree inner2 = gimple_assign_rhs2 (stmt);
2271 gimple s;
2272 tree t;
2273 tree partial = NULL_TREE;
2274 bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
2276 /* Check for boolean identities that don't require recursive examination
2277 of inner1/inner2:
2278 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2279 inner1 OR (inner1 AND inner2) => inner1
2280 !inner1 OR (inner1 OR inner2) => true
2281 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2283 if (inner1 == true_test_var)
2284 return (is_or ? var : inner1);
2285 else if (inner2 == true_test_var)
2286 return (is_or ? var : inner2);
2287 else if (inner1 == false_test_var)
2288 return (is_or
2289 ? boolean_true_node
2290 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2291 else if (inner2 == false_test_var)
2292 return (is_or
2293 ? boolean_true_node
2294 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2296 /* Next, redistribute/reassociate the OR across the inner tests.
2297 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2298 if (TREE_CODE (inner1) == SSA_NAME
2299 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2300 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2301 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2302 gimple_assign_rhs1 (s),
2303 gimple_assign_rhs2 (s),
2304 code2, op2a, op2b)))
2306 /* Handle the OR case, where we are reassociating:
2307 (inner1 OR inner2) OR (op2a code2 op2b)
2308 => (t OR inner2)
2309 If the partial result t is a constant, we win. Otherwise
2310 continue on to try reassociating with the other inner test. */
2311 if (innercode == TRUTH_OR_EXPR)
2313 if (integer_onep (t))
2314 return boolean_true_node;
2315 else if (integer_zerop (t))
2316 return inner2;
2319 /* Handle the AND case, where we are redistributing:
2320 (inner1 AND inner2) OR (op2a code2 op2b)
2321 => (t AND (inner2 OR (op2a code op2b))) */
2322 else
2324 if (integer_zerop (t))
2325 return boolean_false_node;
2326 else
2327 /* Save partial result for later. */
2328 partial = t;
2332 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2333 if (TREE_CODE (inner2) == SSA_NAME
2334 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2335 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2336 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2337 gimple_assign_rhs1 (s),
2338 gimple_assign_rhs2 (s),
2339 code2, op2a, op2b)))
2341 /* Handle the OR case, where we are reassociating:
2342 (inner1 OR inner2) OR (op2a code2 op2b)
2343 => (inner1 OR t) */
2344 if (innercode == TRUTH_OR_EXPR)
2346 if (integer_zerop (t))
2347 return inner1;
2348 else if (integer_onep (t))
2349 return boolean_true_node;
2352 /* Handle the AND case, where we are redistributing:
2353 (inner1 AND inner2) OR (op2a code2 op2b)
2354 => (t AND (inner1 OR (op2a code2 op2b)))
2355 => (t AND partial) */
2356 else
2358 if (integer_zerop (t))
2359 return boolean_false_node;
2360 else if (partial)
2362 /* We already got a simplification for the other
2363 operand to the redistributed AND expression. The
2364 interesting case is when at least one is true.
2365 Or, if both are the same, we can apply the identity
2366 (x AND x) == true. */
2367 if (integer_onep (partial))
2368 return t;
2369 else if (integer_onep (t))
2370 return partial;
2371 else if (same_bool_result_p (t, partial))
2372 return boolean_true_node;
2377 return NULL_TREE;
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Both comparisons test the same operand pair; let
	 combine_comparisons merge the two predicates directly.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Normalize the second comparison's operand order so both
	 predicates apply to (op1a, op1b).  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* cmp is negative/zero/positive as op1b is less than/equal
	 to/greater than op2b.  All range reasoning below keys off it.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      /* val means op1b itself satisfies (x code2 op2b), so the
		 second test covers the single value excluded by !=.  */
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  /* Note the <, >, <=, >= cases test the REVERSED sign of cmp
	     because here we ask whether op2b satisfies (x code1 op1b).  */
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  /* If op1b already satisfies the second test, (x == op1b) is
	     subsumed by it; otherwise no simplification applies.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  /* For an OR of upper bounds, keep the larger (weaker) bound.  */
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise chose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  /* For an OR of lower bounds, keep the smaller (weaker) bound.  */
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	/* (x < a) OR (x > a) excludes exactly one value => (x != a).
	   op1b == op2b here (cmp == 0), so either constant works.  */
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* invert is true when the first comparison tests NAME for
	 falsehood, i.e. (NAME == 0) or (NAME != 1).  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      /* A constant arg that is already "true" (after
			 accounting for inversion) makes this PHI path
			 yield boolean_true_node regardless of the
			 second comparison.  */
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      /* A "false" constant arg leaves only the second
			 comparison; all such args must agree on it.  */
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME)
		    {
		      /* NOTE(review): this recurses into the definition of
			 a PHI argument without checking where that
			 definition lives; for loop PHIs the argument's def
			 may be dominated by the PHI itself — confirm this
			 cannot recurse unboundedly (later GCC versions add
			 a dominance guard here).  */
		      tree temp = or_var_with_comparison (arg, invert,
							  code2, op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
2618 /* Try to simplify the OR of two comparisons, specified by
2619 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2620 If this can be simplified to a single expression (without requiring
2621 introducing more SSA variables to hold intermediate values),
2622 return the resulting tree. Otherwise return NULL_TREE.
2623 If the result expression is non-null, it has boolean type. */
2625 tree
2626 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2627 enum tree_code code2, tree op2a, tree op2b)
2629 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2630 if (t)
2631 return t;
2632 else
2633 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);