Don't crash if a constant initializer refers to the constant.
[official-gcc.git] / gcc / gimple-fold.c
blobb6c06fca165e4f6a04b7a87b31a6a04f8f229036
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
32 #include "target.h"
34 /* Return true when DECL can be referenced from current unit.
35 We can get declarations that are not possible to reference for
36 various reasons:
38 1) When analyzing C++ virtual tables.
39 C++ virtual tables do have known constructors even
40 when they are keyed to other compilation unit.
41 Those tables can contain pointers to methods and vars
42 in other units. Those methods have both STATIC and EXTERNAL
43 set.
44 2) In WHOPR mode devirtualization might lead to reference
45 to method that was partitioned elsehwere.
46 In this case we have static VAR_DECL or FUNCTION_DECL
47 that has no corresponding callgraph/varpool node
48 declaring the body.
49 3) COMDAT functions referred by external vtables that
50 we devirtualize only during final compilation stage.
51 At this time we already decided that we will not output
52 the function body and thus we can't reference the symbol
53 directly. */
55 static bool
56 can_refer_decl_in_current_unit_p (tree decl)
58 struct varpool_node *vnode;
59 struct cgraph_node *node;
61 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
62 return true;
63 /* External flag is set, so we deal with C++ reference
64 to static object from other file. */
65 if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
66 && TREE_CODE (decl) == VAR_DECL)
68 /* Just be sure it is not big in frontend setting
69 flags incorrectly. Those variables should never
70 be finalized. */
71 gcc_checking_assert (!(vnode = varpool_get_node (decl))
72 || !vnode->finalized);
73 return false;
75 /* When function is public, we always can introduce new reference.
76 Exception are the COMDAT functions where introducing a direct
77 reference imply need to include function body in the curren tunit. */
78 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
79 return true;
80 /* We are not at ltrans stage; so don't worry about WHOPR.
81 Also when still gimplifying all referred comdat functions will be
82 produced. */
83 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
84 return true;
85 /* If we already output the function body, we are safe. */
86 if (TREE_ASM_WRITTEN (decl))
87 return true;
88 if (TREE_CODE (decl) == FUNCTION_DECL)
90 node = cgraph_get_node (decl);
91 /* Check that we still have function body and that we didn't took
92 the decision to eliminate offline copy of the function yet.
93 The second is important when devirtualization happens during final
94 compilation stage when making a new reference no longer makes callee
95 to be compiled. */
96 if (!node || !node->analyzed || node->global.inlined_to)
97 return false;
99 else if (TREE_CODE (decl) == VAR_DECL)
101 vnode = varpool_get_node (decl);
102 if (!vnode || !vnode->finalized)
103 return false;
105 return true;
108 /* CVAL is value taken from DECL_INITIAL of variable. Try to transorm it into
109 acceptable form for is_gimple_min_invariant. */
111 tree
112 canonicalize_constructor_val (tree cval)
114 STRIP_NOPS (cval);
115 if (TREE_CODE (cval) == POINTER_PLUS_EXPR)
117 tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval),
118 TREE_OPERAND (cval, 0),
119 TREE_OPERAND (cval, 1),
120 TREE_TYPE (cval));
121 if (t)
122 cval = t;
124 if (TREE_CODE (cval) == ADDR_EXPR)
126 tree base = get_base_address (TREE_OPERAND (cval, 0));
128 if (base
129 && (TREE_CODE (base) == VAR_DECL
130 || TREE_CODE (base) == FUNCTION_DECL)
131 && !can_refer_decl_in_current_unit_p (base))
132 return NULL_TREE;
133 if (base && TREE_CODE (base) == VAR_DECL)
134 add_referenced_var (base);
135 /* We never have the chance to fixup types in global initializers
136 during gimplification. Do so here. */
137 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
138 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
140 return cval;
143 /* If SYM is a constant variable with known value, return the value.
144 NULL_TREE is returned otherwise. */
146 tree
147 get_symbol_constant_value (tree sym)
149 if (const_value_known_p (sym))
151 tree val = DECL_INITIAL (sym);
152 if (val)
154 val = canonicalize_constructor_val (val);
155 if (val && is_gimple_min_invariant (val))
156 return val;
157 else
158 return NULL_TREE;
160 /* Variables declared 'const' without an initializer
161 have zero as the initializer if they may not be
162 overridden at link or run time. */
163 if (!val
164 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
165 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
166 return build_zero_cst (TREE_TYPE (sym));
169 return NULL_TREE;
173 /* Return true if we may propagate the address expression ADDR into the
174 dereference DEREF and cancel them. */
176 bool
177 may_propagate_address_into_dereference (tree addr, tree deref)
179 gcc_assert (TREE_CODE (deref) == MEM_REF
180 && TREE_CODE (addr) == ADDR_EXPR);
182 /* Don't propagate if ADDR's operand has incomplete type. */
183 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
184 return false;
186 /* If the address is invariant then we do not need to preserve restrict
187 qualifications. But we do need to preserve volatile qualifiers until
188 we can annotate the folded dereference itself properly. */
189 if (is_gimple_min_invariant (addr)
190 && (!TREE_THIS_VOLATILE (deref)
191 || TYPE_VOLATILE (TREE_TYPE (addr))))
192 return useless_type_conversion_p (TREE_TYPE (deref),
193 TREE_TYPE (TREE_OPERAND (addr, 0)));
195 /* Else both the address substitution and the folding must result in
196 a valid useless type conversion sequence. */
197 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
198 TREE_TYPE (addr))
199 && useless_type_conversion_p (TREE_TYPE (deref),
200 TREE_TYPE (TREE_OPERAND (addr, 0))));
204 /* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
205 BASE is an array type. OFFSET is a byte displacement.
207 LOC is the location of the original expression. */
209 static tree
210 maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
212 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
213 tree array_type, elt_type, elt_size;
214 tree domain_type;
216 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
217 measured in units of the size of elements type) from that ARRAY_REF).
218 We can't do anything if either is variable.
220 The case we handle here is *(&A[N]+O). */
221 if (TREE_CODE (base) == ARRAY_REF)
223 tree low_bound = array_ref_low_bound (base);
225 elt_offset = TREE_OPERAND (base, 1);
226 if (TREE_CODE (low_bound) != INTEGER_CST
227 || TREE_CODE (elt_offset) != INTEGER_CST)
228 return NULL_TREE;
230 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
231 base = TREE_OPERAND (base, 0);
234 /* Ignore stupid user tricks of indexing non-array variables. */
235 array_type = TREE_TYPE (base);
236 if (TREE_CODE (array_type) != ARRAY_TYPE)
237 return NULL_TREE;
238 elt_type = TREE_TYPE (array_type);
240 /* Use signed size type for intermediate computation on the index. */
241 idx_type = ssizetype;
243 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
244 element type (so we can use the alignment if it's not constant).
245 Otherwise, compute the offset as an index by using a division. If the
246 division isn't exact, then don't do anything. */
247 elt_size = TYPE_SIZE_UNIT (elt_type);
248 if (!elt_size)
249 return NULL;
250 if (integer_zerop (offset))
252 if (TREE_CODE (elt_size) != INTEGER_CST)
253 elt_size = size_int (TYPE_ALIGN (elt_type));
255 idx = build_int_cst (idx_type, 0);
257 else
259 unsigned HOST_WIDE_INT lquo, lrem;
260 HOST_WIDE_INT hquo, hrem;
261 double_int soffset;
263 /* The final array offset should be signed, so we need
264 to sign-extend the (possibly pointer) offset here
265 and use signed division. */
266 soffset = double_int_sext (tree_to_double_int (offset),
267 TYPE_PRECISION (TREE_TYPE (offset)));
268 if (TREE_CODE (elt_size) != INTEGER_CST
269 || div_and_round_double (TRUNC_DIV_EXPR, 0,
270 soffset.low, soffset.high,
271 TREE_INT_CST_LOW (elt_size),
272 TREE_INT_CST_HIGH (elt_size),
273 &lquo, &hquo, &lrem, &hrem)
274 || lrem || hrem)
275 return NULL_TREE;
277 idx = build_int_cst_wide (idx_type, lquo, hquo);
280 /* Assume the low bound is zero. If there is a domain type, get the
281 low bound, if any, convert the index into that type, and add the
282 low bound. */
283 min_idx = build_int_cst (idx_type, 0);
284 domain_type = TYPE_DOMAIN (array_type);
285 if (domain_type)
287 idx_type = domain_type;
288 if (TYPE_MIN_VALUE (idx_type))
289 min_idx = TYPE_MIN_VALUE (idx_type);
290 else
291 min_idx = fold_convert (idx_type, min_idx);
293 if (TREE_CODE (min_idx) != INTEGER_CST)
294 return NULL_TREE;
296 elt_offset = fold_convert (idx_type, elt_offset);
299 if (!integer_zerop (min_idx))
300 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
301 if (!integer_zerop (elt_offset))
302 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
304 /* Make sure to possibly truncate late after offsetting. */
305 idx = fold_convert (idx_type, idx);
307 /* We don't want to construct access past array bounds. For example
308 char *(c[4]);
309 c[3][2];
310 should not be simplified into (*c)[14] or tree-vrp will
311 give false warnings.
312 This is only an issue for multi-dimensional arrays. */
313 if (TREE_CODE (elt_type) == ARRAY_TYPE
314 && domain_type)
316 if (TYPE_MAX_VALUE (domain_type)
317 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
318 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
319 return NULL_TREE;
320 else if (TYPE_MIN_VALUE (domain_type)
321 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
322 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
323 return NULL_TREE;
324 else if (compare_tree_int (idx, 0) < 0)
325 return NULL_TREE;
329 tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
330 SET_EXPR_LOCATION (t, loc);
331 return t;
336 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
337 LOC is the location of original expression.
339 Before attempting the conversion strip off existing ADDR_EXPRs. */
341 tree
342 maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
343 tree orig_type)
345 tree ret;
347 STRIP_NOPS (base);
348 if (TREE_CODE (base) != ADDR_EXPR)
349 return NULL_TREE;
351 base = TREE_OPERAND (base, 0);
352 if (types_compatible_p (orig_type, TREE_TYPE (base))
353 && integer_zerop (offset))
354 return base;
356 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
357 if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
358 return ret;
359 return NULL_TREE;
362 /* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
363 LOC is the location of the original expression. */
365 tree
366 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
367 tree orig_type)
369 tree base, ret;
371 STRIP_NOPS (addr);
372 if (TREE_CODE (addr) != ADDR_EXPR)
373 return NULL_TREE;
374 base = TREE_OPERAND (addr, 0);
375 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
376 if (ret)
378 ret = build_fold_addr_expr (ret);
379 if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
380 return NULL_TREE;
381 SET_EXPR_LOCATION (ret, loc);
384 return ret;
388 /* A quaint feature extant in our address arithmetic is that there
389 can be hidden type changes here. The type of the result need
390 not be the same as the type of the input pointer.
392 What we're after here is an expression of the form
393 (T *)(&array + const)
394 where array is OP0, const is OP1, RES_TYPE is T and
395 the cast doesn't actually exist, but is implicit in the
396 type of the POINTER_PLUS_EXPR. We'd like to turn this into
397 &array[x]
398 which may be able to propagate further. */
400 tree
401 maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
403 tree ptd_type;
404 tree t;
406 /* The first operand should be an ADDR_EXPR. */
407 if (TREE_CODE (op0) != ADDR_EXPR)
408 return NULL_TREE;
409 op0 = TREE_OPERAND (op0, 0);
411 /* It had better be a constant. */
412 if (TREE_CODE (op1) != INTEGER_CST)
414 /* Or op0 should now be A[0] and the non-constant offset defined
415 via a multiplication by the array element size. */
416 if (TREE_CODE (op0) == ARRAY_REF
417 /* As we will end up creating a variable index array access
418 in the outermost array dimension make sure there isn't
419 a more inner array that the index could overflow to. */
420 && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
421 && integer_zerop (TREE_OPERAND (op0, 1))
422 && TREE_CODE (op1) == SSA_NAME)
424 gimple offset_def = SSA_NAME_DEF_STMT (op1);
425 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
426 if (!host_integerp (elsz, 1)
427 || !is_gimple_assign (offset_def))
428 return NULL_TREE;
430 /* Do not build array references of something that we can't
431 see the true number of array dimensions for. */
432 if (!DECL_P (TREE_OPERAND (op0, 0))
433 && !handled_component_p (TREE_OPERAND (op0, 0)))
434 return NULL_TREE;
436 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
437 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
438 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
439 return build_fold_addr_expr
440 (build4 (ARRAY_REF, TREE_TYPE (op0),
441 TREE_OPERAND (op0, 0),
442 gimple_assign_rhs1 (offset_def),
443 TREE_OPERAND (op0, 2),
444 TREE_OPERAND (op0, 3)));
445 else if (integer_onep (elsz)
446 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
447 return build_fold_addr_expr
448 (build4 (ARRAY_REF, TREE_TYPE (op0),
449 TREE_OPERAND (op0, 0),
450 op1,
451 TREE_OPERAND (op0, 2),
452 TREE_OPERAND (op0, 3)));
454 else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
455 /* Dto. */
456 && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
457 && TREE_CODE (op1) == SSA_NAME)
459 gimple offset_def = SSA_NAME_DEF_STMT (op1);
460 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
461 if (!host_integerp (elsz, 1)
462 || !is_gimple_assign (offset_def))
463 return NULL_TREE;
465 /* Do not build array references of something that we can't
466 see the true number of array dimensions for. */
467 if (!DECL_P (op0)
468 && !handled_component_p (op0))
469 return NULL_TREE;
471 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
472 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
473 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
474 return build_fold_addr_expr
475 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
476 op0, gimple_assign_rhs1 (offset_def),
477 integer_zero_node, NULL_TREE));
478 else if (integer_onep (elsz)
479 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
480 return build_fold_addr_expr
481 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
482 op0, op1,
483 integer_zero_node, NULL_TREE));
486 return NULL_TREE;
489 /* If the first operand is an ARRAY_REF, expand it so that we can fold
490 the offset into it. */
491 while (TREE_CODE (op0) == ARRAY_REF)
493 tree array_obj = TREE_OPERAND (op0, 0);
494 tree array_idx = TREE_OPERAND (op0, 1);
495 tree elt_type = TREE_TYPE (op0);
496 tree elt_size = TYPE_SIZE_UNIT (elt_type);
497 tree min_idx;
499 if (TREE_CODE (array_idx) != INTEGER_CST)
500 break;
501 if (TREE_CODE (elt_size) != INTEGER_CST)
502 break;
504 /* Un-bias the index by the min index of the array type. */
505 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
506 if (min_idx)
508 min_idx = TYPE_MIN_VALUE (min_idx);
509 if (min_idx)
511 if (TREE_CODE (min_idx) != INTEGER_CST)
512 break;
514 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
515 if (!integer_zerop (min_idx))
516 array_idx = int_const_binop (MINUS_EXPR, array_idx,
517 min_idx, 0);
521 /* Convert the index to a byte offset. */
522 array_idx = fold_convert (sizetype, array_idx);
523 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
525 /* Update the operands for the next round, or for folding. */
526 op1 = int_const_binop (PLUS_EXPR,
527 array_idx, op1, 0);
528 op0 = array_obj;
531 ptd_type = TREE_TYPE (res_type);
532 /* If we want a pointer to void, reconstruct the reference from the
533 array element type. A pointer to that can be trivially converted
534 to void *. This happens as we fold (void *)(ptr p+ off). */
535 if (VOID_TYPE_P (ptd_type)
536 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
537 ptd_type = TREE_TYPE (TREE_TYPE (op0));
539 /* At which point we can try some of the same things as for indirects. */
540 t = maybe_fold_offset_to_array_ref (loc, op0, op1);
541 if (t)
543 t = build_fold_addr_expr (t);
544 if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
545 return NULL_TREE;
546 SET_EXPR_LOCATION (t, loc);
549 return t;
552 /* Subroutine of fold_stmt. We perform several simplifications of the
553 memory reference tree EXPR and make sure to re-gimplify them properly
554 after propagation of constant addresses. IS_LHS is true if the
555 reference is supposed to be an lvalue. */
557 static tree
558 maybe_fold_reference (tree expr, bool is_lhs)
560 tree *t = &expr;
561 tree result;
563 if (!is_lhs
564 && (result = fold_const_aggregate_ref (expr))
565 && is_gimple_min_invariant (result))
566 return result;
568 /* ??? We might want to open-code the relevant remaining cases
569 to avoid using the generic fold. */
570 if (handled_component_p (*t)
571 && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
573 tree tem = fold (*t);
574 if (tem != *t)
575 return tem;
578 while (handled_component_p (*t))
579 t = &TREE_OPERAND (*t, 0);
581 /* Fold back MEM_REFs to reference trees. */
582 if (TREE_CODE (*t) == MEM_REF
583 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
584 && integer_zerop (TREE_OPERAND (*t, 1))
585 && (TREE_THIS_VOLATILE (*t)
586 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
587 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
588 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
589 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
590 /* We have to look out here to not drop a required conversion
591 from the rhs to the lhs if is_lhs, but we don't have the
592 rhs here to verify that. Thus require strict type
593 compatibility. */
594 && types_compatible_p (TREE_TYPE (*t),
595 TREE_TYPE (TREE_OPERAND
596 (TREE_OPERAND (*t, 0), 0))))
598 tree tem;
599 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
600 tem = maybe_fold_reference (expr, is_lhs);
601 if (tem)
602 return tem;
603 return expr;
605 /* Canonicalize MEM_REFs invariant address operand. */
606 else if (TREE_CODE (*t) == MEM_REF
607 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
609 bool volatile_p = TREE_THIS_VOLATILE (*t);
610 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
611 TREE_OPERAND (*t, 0),
612 TREE_OPERAND (*t, 1));
613 if (tem)
615 TREE_THIS_VOLATILE (tem) = volatile_p;
616 *t = tem;
617 tem = maybe_fold_reference (expr, is_lhs);
618 if (tem)
619 return tem;
620 return expr;
623 else if (TREE_CODE (*t) == TARGET_MEM_REF)
625 tree tem = maybe_fold_tmr (*t);
626 if (tem)
628 *t = tem;
629 tem = maybe_fold_reference (expr, is_lhs);
630 if (tem)
631 return tem;
632 return expr;
635 else if (!is_lhs
636 && DECL_P (*t))
638 tree tem = get_symbol_constant_value (*t);
639 if (tem
640 && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
642 *t = unshare_expr (tem);
643 tem = maybe_fold_reference (expr, is_lhs);
644 if (tem)
645 return tem;
646 return expr;
650 return NULL_TREE;
654 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
655 replacement rhs for the statement or NULL_TREE if no simplification
656 could be made. It is assumed that the operands have been previously
657 folded. */
659 static tree
660 fold_gimple_assign (gimple_stmt_iterator *si)
662 gimple stmt = gsi_stmt (*si);
663 enum tree_code subcode = gimple_assign_rhs_code (stmt);
664 location_t loc = gimple_location (stmt);
666 tree result = NULL_TREE;
668 switch (get_gimple_rhs_class (subcode))
670 case GIMPLE_SINGLE_RHS:
672 tree rhs = gimple_assign_rhs1 (stmt);
674 /* Try to fold a conditional expression. */
675 if (TREE_CODE (rhs) == COND_EXPR)
677 tree op0 = COND_EXPR_COND (rhs);
678 tree tem;
679 bool set = false;
680 location_t cond_loc = EXPR_LOCATION (rhs);
682 if (COMPARISON_CLASS_P (op0))
684 fold_defer_overflow_warnings ();
685 tem = fold_binary_loc (cond_loc,
686 TREE_CODE (op0), TREE_TYPE (op0),
687 TREE_OPERAND (op0, 0),
688 TREE_OPERAND (op0, 1));
689 /* This is actually a conditional expression, not a GIMPLE
690 conditional statement, however, the valid_gimple_rhs_p
691 test still applies. */
692 set = (tem && is_gimple_condexpr (tem)
693 && valid_gimple_rhs_p (tem));
694 fold_undefer_overflow_warnings (set, stmt, 0);
696 else if (is_gimple_min_invariant (op0))
698 tem = op0;
699 set = true;
701 else
702 return NULL_TREE;
704 if (set)
705 result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
706 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
709 else if (REFERENCE_CLASS_P (rhs))
710 return maybe_fold_reference (rhs, false);
712 else if (TREE_CODE (rhs) == ADDR_EXPR)
714 tree ref = TREE_OPERAND (rhs, 0);
715 tree tem = maybe_fold_reference (ref, true);
716 if (tem
717 && TREE_CODE (tem) == MEM_REF
718 && integer_zerop (TREE_OPERAND (tem, 1)))
719 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
720 else if (tem)
721 result = fold_convert (TREE_TYPE (rhs),
722 build_fold_addr_expr_loc (loc, tem));
723 else if (TREE_CODE (ref) == MEM_REF
724 && integer_zerop (TREE_OPERAND (ref, 1)))
725 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
728 else if (TREE_CODE (rhs) == CONSTRUCTOR
729 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
730 && (CONSTRUCTOR_NELTS (rhs)
731 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
733 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
734 unsigned i;
735 tree val;
737 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
738 if (TREE_CODE (val) != INTEGER_CST
739 && TREE_CODE (val) != REAL_CST
740 && TREE_CODE (val) != FIXED_CST)
741 return NULL_TREE;
743 return build_vector_from_ctor (TREE_TYPE (rhs),
744 CONSTRUCTOR_ELTS (rhs));
747 else if (DECL_P (rhs))
748 return unshare_expr (get_symbol_constant_value (rhs));
750 /* If we couldn't fold the RHS, hand over to the generic
751 fold routines. */
752 if (result == NULL_TREE)
753 result = fold (rhs);
755 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
756 that may have been added by fold, and "useless" type
757 conversions that might now be apparent due to propagation. */
758 STRIP_USELESS_TYPE_CONVERSION (result);
760 if (result != rhs && valid_gimple_rhs_p (result))
761 return result;
763 return NULL_TREE;
765 break;
767 case GIMPLE_UNARY_RHS:
769 tree rhs = gimple_assign_rhs1 (stmt);
771 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
772 if (result)
774 /* If the operation was a conversion do _not_ mark a
775 resulting constant with TREE_OVERFLOW if the original
776 constant was not. These conversions have implementation
777 defined behavior and retaining the TREE_OVERFLOW flag
778 here would confuse later passes such as VRP. */
779 if (CONVERT_EXPR_CODE_P (subcode)
780 && TREE_CODE (result) == INTEGER_CST
781 && TREE_CODE (rhs) == INTEGER_CST)
782 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
784 STRIP_USELESS_TYPE_CONVERSION (result);
785 if (valid_gimple_rhs_p (result))
786 return result;
788 else if (CONVERT_EXPR_CODE_P (subcode)
789 && POINTER_TYPE_P (gimple_expr_type (stmt))
790 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
792 tree type = gimple_expr_type (stmt);
793 tree t = maybe_fold_offset_to_address (loc,
794 gimple_assign_rhs1 (stmt),
795 integer_zero_node, type);
796 if (t)
797 return t;
800 break;
802 case GIMPLE_BINARY_RHS:
803 /* Try to fold pointer addition. */
804 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
806 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
807 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
809 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
810 if (!useless_type_conversion_p
811 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
812 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
814 result = maybe_fold_stmt_addition (gimple_location (stmt),
815 type,
816 gimple_assign_rhs1 (stmt),
817 gimple_assign_rhs2 (stmt));
820 if (!result)
821 result = fold_binary_loc (loc, subcode,
822 TREE_TYPE (gimple_assign_lhs (stmt)),
823 gimple_assign_rhs1 (stmt),
824 gimple_assign_rhs2 (stmt));
826 if (result)
828 STRIP_USELESS_TYPE_CONVERSION (result);
829 if (valid_gimple_rhs_p (result))
830 return result;
832 /* Fold might have produced non-GIMPLE, so if we trust it blindly
833 we lose canonicalization opportunities. Do not go again
834 through fold here though, or the same non-GIMPLE will be
835 produced. */
836 if (commutative_tree_code (subcode)
837 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
838 gimple_assign_rhs2 (stmt), false))
839 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
840 gimple_assign_rhs2 (stmt),
841 gimple_assign_rhs1 (stmt));
843 break;
845 case GIMPLE_TERNARY_RHS:
846 result = fold_ternary_loc (loc, subcode,
847 TREE_TYPE (gimple_assign_lhs (stmt)),
848 gimple_assign_rhs1 (stmt),
849 gimple_assign_rhs2 (stmt),
850 gimple_assign_rhs3 (stmt));
852 if (result)
854 STRIP_USELESS_TYPE_CONVERSION (result);
855 if (valid_gimple_rhs_p (result))
856 return result;
858 /* Fold might have produced non-GIMPLE, so if we trust it blindly
859 we lose canonicalization opportunities. Do not go again
860 through fold here though, or the same non-GIMPLE will be
861 produced. */
862 if (commutative_ternary_tree_code (subcode)
863 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
864 gimple_assign_rhs2 (stmt), false))
865 return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
866 gimple_assign_rhs2 (stmt),
867 gimple_assign_rhs1 (stmt),
868 gimple_assign_rhs3 (stmt));
870 break;
872 case GIMPLE_INVALID_RHS:
873 gcc_unreachable ();
876 return NULL_TREE;
879 /* Attempt to fold a conditional statement. Return true if any changes were
880 made. We only attempt to fold the condition expression, and do not perform
881 any transformation that would require alteration of the cfg. It is
882 assumed that the operands have been previously folded. */
884 static bool
885 fold_gimple_cond (gimple stmt)
887 tree result = fold_binary_loc (gimple_location (stmt),
888 gimple_cond_code (stmt),
889 boolean_type_node,
890 gimple_cond_lhs (stmt),
891 gimple_cond_rhs (stmt));
893 if (result)
895 STRIP_USELESS_TYPE_CONVERSION (result);
896 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
898 gimple_cond_set_condition_from_tree (stmt, result);
899 return true;
903 return false;
906 /* Convert EXPR into a GIMPLE value suitable for substitution on the
907 RHS of an assignment. Insert the necessary statements before
908 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
909 is replaced. If the call is expected to produces a result, then it
910 is replaced by an assignment of the new RHS to the result variable.
911 If the result is to be ignored, then the call is replaced by a
912 GIMPLE_NOP. A proper VDEF chain is retained by making the first
913 VUSE and the last VDEF of the whole sequence be the same as the replaced
914 statement and using new SSA names for stores in between. */
916 void
917 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
919 tree lhs;
920 tree tmp = NULL_TREE; /* Silence warning. */
921 gimple stmt, new_stmt;
922 gimple_stmt_iterator i;
923 gimple_seq stmts = gimple_seq_alloc();
924 struct gimplify_ctx gctx;
925 gimple last = NULL;
926 gimple laststore = NULL;
927 tree reaching_vuse;
929 stmt = gsi_stmt (*si_p);
931 gcc_assert (is_gimple_call (stmt));
933 lhs = gimple_call_lhs (stmt);
934 reaching_vuse = gimple_vuse (stmt);
936 push_gimplify_context (&gctx);
938 if (lhs == NULL_TREE)
940 gimplify_and_add (expr, &stmts);
941 /* We can end up with folding a memcpy of an empty class assignment
942 which gets optimized away by C++ gimplification. */
943 if (gimple_seq_empty_p (stmts))
945 if (gimple_in_ssa_p (cfun))
947 unlink_stmt_vdef (stmt);
948 release_defs (stmt);
950 gsi_remove (si_p, true);
951 return;
954 else
955 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
957 pop_gimplify_context (NULL);
959 if (gimple_has_location (stmt))
960 annotate_all_with_location (stmts, gimple_location (stmt));
962 /* The replacement can expose previously unreferenced variables. */
963 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
965 if (last)
967 gsi_insert_before (si_p, last, GSI_NEW_STMT);
968 gsi_next (si_p);
970 new_stmt = gsi_stmt (i);
971 if (gimple_in_ssa_p (cfun))
973 find_new_referenced_vars (new_stmt);
974 mark_symbols_for_renaming (new_stmt);
976 /* If the new statement has a VUSE, update it with exact SSA name we
977 know will reach this one. */
978 if (gimple_vuse (new_stmt))
980 /* If we've also seen a previous store create a new VDEF for
981 the latter one, and make that the new reaching VUSE. */
982 if (laststore)
984 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
985 gimple_set_vdef (laststore, reaching_vuse);
986 update_stmt (laststore);
987 laststore = NULL;
989 gimple_set_vuse (new_stmt, reaching_vuse);
990 gimple_set_modified (new_stmt, true);
992 if (gimple_assign_single_p (new_stmt)
993 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
995 laststore = new_stmt;
997 last = new_stmt;
1000 if (lhs == NULL_TREE)
1002 /* If we replace a call without LHS that has a VDEF and our new
1003 sequence ends with a store we must make that store have the same
1004 vdef in order not to break the sequencing. This can happen
1005 for instance when folding memcpy calls into assignments. */
1006 if (gimple_vdef (stmt) && laststore)
1008 gimple_set_vdef (laststore, gimple_vdef (stmt));
1009 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1010 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
1011 update_stmt (laststore);
1013 else if (gimple_in_ssa_p (cfun))
1015 unlink_stmt_vdef (stmt);
1016 release_defs (stmt);
1018 new_stmt = last;
1020 else
1022 if (last)
1024 gsi_insert_before (si_p, last, GSI_NEW_STMT);
1025 gsi_next (si_p);
1027 if (laststore && is_gimple_reg (lhs))
1029 gimple_set_vdef (laststore, gimple_vdef (stmt));
1030 update_stmt (laststore);
1031 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1032 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
1033 laststore = NULL;
1035 else if (laststore)
1037 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
1038 gimple_set_vdef (laststore, reaching_vuse);
1039 update_stmt (laststore);
1040 laststore = NULL;
1042 new_stmt = gimple_build_assign (lhs, tmp);
1043 if (!is_gimple_reg (tmp))
1044 gimple_set_vuse (new_stmt, reaching_vuse);
1045 if (!is_gimple_reg (lhs))
1047 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1048 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1049 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
1051 else if (reaching_vuse == gimple_vuse (stmt))
1052 unlink_stmt_vdef (stmt);
1055 gimple_set_location (new_stmt, gimple_location (stmt));
1056 gsi_replace (si_p, new_stmt, false);
1059 /* Return the string length, maximum string length or maximum value of
1060 ARG in LENGTH.
1061 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1062 is not NULL and, for TYPE == 0, its value is not equal to the length
1063 we determine or if we are unable to determine the length or value,
1064 return false. VISITED is a bitmap of visited variables.
1065 TYPE is 0 if string length should be returned, 1 for maximum string
1066 length and 2 for maximum value ARG can have. */
static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  /* Non-SSA operands: constants, addresses and COND_EXPRs are handled
     directly without consulting def chains.  */
  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* A COND_EXPR is usable only if both arms agree on the length.  */
      if (TREE_CODE (arg) == COND_EXPR)
        return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
               && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	       && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  /* Maximum-value mode: ARG itself must be a non-negative
	     integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  /* A length was already recorded (e.g. from another PHI arg).  */
	  if (type > 0)
	    {
	      /* Maximum modes: keep the larger of the two constants.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  /* Exact mode (type == 0): the lengths must agree.  */
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }

	return true;
      }

    default:
      return false;
    }
}
1175 /* Fold builtin call in statement STMT. Returns a simplified tree.
1176 We may return a non-constant expression, including another call
1177 to a different function and with different arguments, e.g.,
1178 substituting memcpy for strcpy when the string length is known.
1179 Note that some builtins expand into inline code that may not
1180 be valid in GIMPLE. Callers must take care. */
tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  /* An unused result lets us fold more aggressively (fold_ignored_result
     below strips side-effect-free wrappers).  */
  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.
     ARG_IDX selects which argument carries the string/length of
     interest; TYPE selects the get_maxval_strlen mode (0 = exact
     string length, 1 = maximum string length, 2 = maximum value).  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
              fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      /* Every function code admitted by the first switch must be
	 handled above.  */
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
1367 /* Search for a base binfo of BINFO that corresponds to TYPE and return it if
1368 it is found or NULL_TREE if it is not. */
1370 static tree
1371 get_base_binfo_for_type (tree binfo, tree type)
1373 int i;
1374 tree base_binfo;
1375 tree res = NULL_TREE;
1377 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1378 if (TREE_TYPE (base_binfo) == type)
1380 gcc_assert (!res);
1381 res = base_binfo;
1384 return res;
1387 /* Return a binfo describing the part of object referenced by expression REF.
1388 Return NULL_TREE if it cannot be determined. REF can consist of a series of
1389 COMPONENT_REFs of a declaration or of an INDIRECT_REF or it can also be just
1390 a simple declaration, indirect reference or an SSA_NAME. If the function
1391 discovers an INDIRECT_REF or an SSA_NAME, it will assume that the
1392 encapsulating type is described by KNOWN_BINFO, if it is not NULL_TREE.
1393 Otherwise the first non-artificial field declaration or the base declaration
1394 will be examined to get the encapsulating type. */
tree
gimple_get_relevant_ref_binfo (tree ref, tree known_binfo)
{
  /* Walk inward through the reference until we reach its base object.  */
  while (true)
    {
      if (TREE_CODE (ref) == COMPONENT_REF)
	{
	  tree par_type;
	  tree binfo;
	  tree field = TREE_OPERAND (ref, 1);

	  /* A non-artificial field is a user-declared member; its own
	     type (if a class) describes the referenced sub-object.  */
	  if (!DECL_ARTIFICIAL (field))
	    {
	      tree type = TREE_TYPE (field);
	      if (TREE_CODE (type) == RECORD_TYPE)
		return TYPE_BINFO (type);
	      else
		return NULL_TREE;
	    }

	  /* Artificial fields represent base sub-objects; look at the
	     parent type's binfo to interpret them.  */
	  par_type = TREE_TYPE (TREE_OPERAND (ref, 0));
	  binfo = TYPE_BINFO (par_type);
	  if (!binfo
	      || BINFO_N_BASE_BINFOS (binfo) == 0)
	    return NULL_TREE;

	  /* Offset 0 indicates the primary base, whose vtable contents are
	     represented in the binfo for the derived class.  */
	  if (int_bit_position (field) != 0)
	    {
	      tree d_binfo;

	      /* Get descendant binfo.  */
	      d_binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (ref, 0),
						       known_binfo);
	      if (!d_binfo)
		return NULL_TREE;
	      return get_base_binfo_for_type (d_binfo, TREE_TYPE (field));
	    }

	  /* Primary base: peel the COMPONENT_REF and keep walking.  */
	  ref = TREE_OPERAND (ref, 0);
	}
      else if (DECL_P (ref) && TREE_CODE (TREE_TYPE (ref)) == RECORD_TYPE)
	return TYPE_BINFO (TREE_TYPE (ref));
      else if (known_binfo
	       && (TREE_CODE (ref) == SSA_NAME
		   || TREE_CODE (ref) == MEM_REF))
	/* For opaque bases the caller-supplied binfo is our only
	   source of type information.  */
	return known_binfo;
      else
	return NULL_TREE;
    }
}
1449 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
1450 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
1451 KNOWN_BINFO carries the binfo describing the true type of
1452 OBJ_TYPE_REF_OBJECT(REF). If a call to the function must be accompanied
1453 with a this adjustment, the constant which should be added to this pointer
1454 is stored to *DELTA. If REFUSE_THUNKS is true, return NULL if the function
1455 is a thunk (other than a this adjustment which is dealt with by DELTA). */
tree
gimple_get_virt_mehtod_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  tree *delta, bool refuse_thunks)
{
  HOST_WIDE_INT i;
  tree v, fndecl;
  struct cgraph_node *node;

  v = BINFO_VIRTUALS (known_binfo);
  /* If there is no virtual methods leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;
  i = 0;
  /* Step down the BINFO_VIRTUALS list to the entry for TOKEN.  Each
     vtable slot occupies TARGET_VTABLE_USES_DESCRIPTORS entries when
     descriptors are in use, otherwise one.
     NOTE(review): assumes TOKEN is always reachable by these steps
     (i.e. a multiple of the stride) -- otherwise this loop would walk
     past it; confirm against callers.  */
  while (i != token)
    {
      i += (TARGET_VTABLE_USES_DESCRIPTORS
	    ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
      v = TREE_CHAIN (v);
    }

  fndecl = TREE_VALUE (v);
  node = cgraph_get_node_or_alias (fndecl);
  if (refuse_thunks
      && (!node
	  /* Bail out if it is a thunk declaration.  Since simple this_adjusting
	     thunks are represented by a constant in TREE_PURPOSE of items in
	     BINFO_VIRTUALS, this is a more complicate type which we cannot handle as
	     yet.

	     FIXME: Remove the following condition once we are able to represent
	     thunk information on call graph edges.  */
	  || (node->same_body_alias && node->thunk.thunk_p)))
    return NULL_TREE;

  /* When cgraph node is missing and function is not public, we cannot
     devirtualize.  This can happen in WHOPR when the actual method
     ends up in other partition, because we found devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (TREE_VALUE (v)))
    return NULL_TREE;

  /* TREE_PURPOSE holds the this-pointer adjustment for this slot.  */
  *delta = TREE_PURPOSE (v);
  gcc_checking_assert (host_integerp (*delta, 0));
  return fndecl;
}
1503 /* Generate code adjusting the first parameter of a call statement determined
1504 by GSI by DELTA. */
1506 void
1507 gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
1509 gimple call_stmt = gsi_stmt (*gsi);
1510 tree parm, tmp;
1511 gimple new_stmt;
1513 delta = fold_convert (sizetype, delta);
1514 gcc_assert (gimple_call_num_args (call_stmt) >= 1);
1515 parm = gimple_call_arg (call_stmt, 0);
1516 gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
1517 tmp = create_tmp_var (TREE_TYPE (parm), NULL);
1518 add_referenced_var (tmp);
1520 tmp = make_ssa_name (tmp, NULL);
1521 new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
1522 SSA_NAME_DEF_STMT (tmp) = new_stmt;
1523 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1524 gimple_call_set_arg (call_stmt, 0, tmp);
1527 /* Fold a call statement to OBJ_TYPE_REF to a direct call, if possible. GSI
1528 determines the statement, generating new statements is allowed only if
1529 INPLACE is false. Return true iff the statement was changed. */
static bool
gimple_fold_obj_type_ref_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree ref = gimple_call_fn (stmt);
  tree obj = OBJ_TYPE_REF_OBJECT (ref);
  tree binfo, fndecl, delta;
  HOST_WIDE_INT token;

  /* We can only devirtualize when the static type of the object is
     visible, i.e. the object operand is &something.  */
  if (TREE_CODE (obj) == ADDR_EXPR)
    obj = TREE_OPERAND (obj, 0);
  else
    return false;

  binfo = gimple_get_relevant_ref_binfo (obj, NULL_TREE);
  if (!binfo)
    return false;
  token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
  /* Refuse thunks unless OBJ is a plain declaration (in which case the
     dynamic type is known exactly).  */
  fndecl = gimple_get_virt_mehtod_for_binfo (token, binfo, &delta,
					     !DECL_P (obj));
  if (!fndecl)
    return false;

  /* A nonzero delta needs a this-pointer adjustment, which requires
     inserting a new statement -- not allowed in INPLACE mode.  */
  if (integer_nonzerop (delta))
    {
      if (inplace)
        return false;
      gimple_adjust_this_by_delta (gsi, delta);
    }

  gimple_call_set_fndecl (stmt, fndecl);
  return true;
}
1565 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1566 The statement may be replaced by another statement, e.g., if the call
1567 simplifies to a constant value. Return true if any changes were made.
1568 It is assumed that the operands have been previously folded. */
1570 bool
1571 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1573 gimple stmt = gsi_stmt (*gsi);
1575 tree callee = gimple_call_fndecl (stmt);
1577 /* Check for builtins that CCP can handle using information not
1578 available in the generic fold routines. */
1579 if (!inplace && callee && DECL_BUILT_IN (callee))
1581 tree result = gimple_fold_builtin (stmt);
1583 if (result)
1585 if (!update_call_from_tree (gsi, result))
1586 gimplify_and_update_call_from_tree (gsi, result);
1587 return true;
1590 else
1592 /* ??? Should perhaps do this in fold proper. However, doing it
1593 there requires that we create a new CALL_EXPR, and that requires
1594 copying EH region info to the new node. Easier to just do it
1595 here where we can just smash the call operand. */
1596 callee = gimple_call_fn (stmt);
1597 if (TREE_CODE (callee) == OBJ_TYPE_REF)
1598 return gimple_fold_obj_type_ref_call (gsi, inplace);
1601 return false;
1604 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1605 distinguishes both cases. */
static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	tree lhs = gimple_assign_lhs (stmt);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In-place folding may not grow the number of operands.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      /* Fold *& in call arguments.  */
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
	  {
	    tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	    if (tmp)
	      {
		gimple_call_set_arg (stmt, i, tmp);
		changed = true;
	      }
	  }
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  Outputs are lvalues (is_lhs == true),
	 inputs are rvalues.  */
      for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	{
	  tree link = gimple_asm_output_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, true)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	{
	  tree link = gimple_asm_input_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, false)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  /* Re-fetch: the folding above may have replaced the statement.  */
  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
1721 /* Fold the statement pointed to by GSI. In some cases, this function may
1722 replace the whole statement with a new one. Returns true iff folding
1723 makes any changes.
1724 The statement pointed to by GSI should be in valid gimple form but may
1725 be in unfolded state as resulting from for example constant propagation
1726 which can produce *&x = 0. */
bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Full folding: the statement may be replaced (INPLACE == false).  */
  return fold_stmt_1 (gsi, false);
}
1734 /* Perform the minimal folding on statement STMT. Only operations like
1735 *&x created by constant propagation are handled. The statement cannot
1736 be replaced with a new one. Return true if the statement was
1737 changed, false otherwise.
1738 The statement STMT should be in valid gimple form but may
1739 be in unfolded state as resulting from for example constant propagation
1740 which can produce *&x = 0. */
1742 bool
1743 fold_stmt_inplace (gimple stmt)
1745 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1746 bool changed = fold_stmt_1 (&gsi, true);
1747 gcc_assert (gsi_stmt (gsi) == stmt);
1748 return changed;
1751 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1752 if EXPR is null or we don't know how.
1753 If non-null, the result always has boolean type. */
1755 static tree
1756 canonicalize_bool (tree expr, bool invert)
1758 if (!expr)
1759 return NULL_TREE;
1760 else if (invert)
1762 if (integer_nonzerop (expr))
1763 return boolean_false_node;
1764 else if (integer_zerop (expr))
1765 return boolean_true_node;
1766 else if (TREE_CODE (expr) == SSA_NAME)
1767 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1768 build_int_cst (TREE_TYPE (expr), 0));
1769 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1770 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1771 boolean_type_node,
1772 TREE_OPERAND (expr, 0),
1773 TREE_OPERAND (expr, 1));
1774 else
1775 return NULL_TREE;
1777 else
1779 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1780 return expr;
1781 if (integer_nonzerop (expr))
1782 return boolean_true_node;
1783 else if (integer_zerop (expr))
1784 return boolean_false_node;
1785 else if (TREE_CODE (expr) == SSA_NAME)
1786 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1787 build_int_cst (TREE_TYPE (expr), 0));
1788 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1789 return fold_build2 (TREE_CODE (expr),
1790 boolean_type_node,
1791 TREE_OPERAND (expr, 0),
1792 TREE_OPERAND (expr, 1));
1793 else
1794 return NULL_TREE;
1798 /* Check to see if a boolean expression EXPR is logically equivalent to the
1799 comparison (OP1 CODE OP2). Check for various identities involving
1800 SSA_NAMEs. */
static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* A boolean name equals (name != 0) and (name == 1).  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) / (name == 1): same polarity as the definition.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) / (name != 1): inverted polarity.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
1856 /* Check to see if two boolean expressions OP1 and OP2 are logically
1857 equivalent. */
1859 static bool
1860 same_bool_result_p (const_tree op1, const_tree op2)
1862 /* Simple cases first. */
1863 if (operand_equal_p (op1, op2, 0))
1864 return true;
1866 /* Check the cases where at least one of the operands is a comparison.
1867 These are a bit smarter than operand_equal_p in that they apply some
1868 identifies on SSA_NAMEs. */
1869 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1870 && same_bool_comparison_p (op1, TREE_CODE (op2),
1871 TREE_OPERAND (op2, 0),
1872 TREE_OPERAND (op2, 1)))
1873 return true;
1874 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1875 && same_bool_comparison_p (op2, TREE_CODE (op1),
1876 TREE_OPERAND (op1, 0),
1877 TREE_OPERAND (op1, 1)))
1878 return true;
1880 /* Default case. */
1881 return false;
/* Forward declarations for some mutually recursive functions.
   (and_*/or_* recurse into each other via DeMorgan rewrites.)  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
1905 /* Helper function for and_comparisons_1: try to simplify the AND of the
1906 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1907 If INVERT is true, invert the value of the VAR before doing the AND.
1908 Return NULL_EXPR if we can't simplify this to a single expression. */
1910 static tree
1911 and_var_with_comparison (tree var, bool invert,
1912 enum tree_code code2, tree op2a, tree op2b)
1914 tree t;
1915 gimple stmt = SSA_NAME_DEF_STMT (var);
1917 /* We can only deal with variables whose definitions are assignments. */
1918 if (!is_gimple_assign (stmt))
1919 return NULL_TREE;
1921 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1922 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1923 Then we only have to consider the simpler non-inverted cases. */
1924 if (invert)
1925 t = or_var_with_comparison_1 (stmt,
1926 invert_tree_comparison (code2, false),
1927 op2a, op2b);
1928 else
1929 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1930 return canonicalize_bool (t, invert);
1933 /* Try to simplify the AND of the ssa variable defined by the assignment
1934 STMT with the comparison specified by (OP2A CODE2 OP2B).
1935 Return NULL_EXPR if we can't simplify this to a single expression. */
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) / (op2a == 1) tests op2a for truth ...  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* ... while (op2a == 0) / (op2a != 1) tests it for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (innercode == TRUTH_AND_EXPR
      || innercode == TRUTH_OR_EXPR
      || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
	  && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  /* No simplification found.  */
  return NULL_TREE;
}
2098 /* Try to simplify the AND of two comparisons defined by
2099 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2100 If this can be done without constructing an intermediate value,
2101 return the resulting tree; otherwise NULL_TREE is returned.
2102 This function is deliberately asymmetric as it recurses on SSA_DEFs
2103 in the first comparison but not the second. */
2105 static tree
2106 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2107 enum tree_code code2, tree op2a, tree op2b)
2109 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
2110 if (operand_equal_p (op1a, op2a, 0)
2111 && operand_equal_p (op1b, op2b, 0))
2113 tree t = combine_comparisons (UNKNOWN_LOCATION,
2114 TRUTH_ANDIF_EXPR, code1, code2,
2115 boolean_type_node, op1a, op1b);
2116 if (t)
2117 return t;
2120 /* Likewise the swapped case of the above. */
2121 if (operand_equal_p (op1a, op2b, 0)
2122 && operand_equal_p (op1b, op2a, 0))
2124 tree t = combine_comparisons (UNKNOWN_LOCATION,
2125 TRUTH_ANDIF_EXPR, code1,
2126 swap_tree_comparison (code2),
2127 boolean_type_node, op1a, op1b);
2128 if (t)
2129 return t;
2132 /* If both comparisons are of the same value against constants, we might
2133 be able to merge them. */
2134 if (operand_equal_p (op1a, op2a, 0)
2135 && TREE_CODE (op1b) == INTEGER_CST
2136 && TREE_CODE (op2b) == INTEGER_CST)
2138 int cmp = tree_int_cst_compare (op1b, op2b);
2140 /* If we have (op1a == op1b), we should either be able to
2141 return that or FALSE, depending on whether the constant op1b
2142 also satisfies the other comparison against op2b. */
2143 if (code1 == EQ_EXPR)
2145 bool done = true;
2146 bool val;
2147 switch (code2)
2149 case EQ_EXPR: val = (cmp == 0); break;
2150 case NE_EXPR: val = (cmp != 0); break;
2151 case LT_EXPR: val = (cmp < 0); break;
2152 case GT_EXPR: val = (cmp > 0); break;
2153 case LE_EXPR: val = (cmp <= 0); break;
2154 case GE_EXPR: val = (cmp >= 0); break;
2155 default: done = false;
2157 if (done)
2159 if (val)
2160 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2161 else
2162 return boolean_false_node;
2165 /* Likewise if the second comparison is an == comparison. */
2166 else if (code2 == EQ_EXPR)
2168 bool done = true;
2169 bool val;
2170 switch (code1)
2172 case EQ_EXPR: val = (cmp == 0); break;
2173 case NE_EXPR: val = (cmp != 0); break;
2174 case LT_EXPR: val = (cmp > 0); break;
2175 case GT_EXPR: val = (cmp < 0); break;
2176 case LE_EXPR: val = (cmp >= 0); break;
2177 case GE_EXPR: val = (cmp <= 0); break;
2178 default: done = false;
2180 if (done)
2182 if (val)
2183 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2184 else
2185 return boolean_false_node;
2189 /* Same business with inequality tests. */
2190 else if (code1 == NE_EXPR)
2192 bool val;
2193 switch (code2)
2195 case EQ_EXPR: val = (cmp != 0); break;
2196 case NE_EXPR: val = (cmp == 0); break;
2197 case LT_EXPR: val = (cmp >= 0); break;
2198 case GT_EXPR: val = (cmp <= 0); break;
2199 case LE_EXPR: val = (cmp > 0); break;
2200 case GE_EXPR: val = (cmp < 0); break;
2201 default:
2202 val = false;
2204 if (val)
2205 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2207 else if (code2 == NE_EXPR)
2209 bool val;
2210 switch (code1)
2212 case EQ_EXPR: val = (cmp == 0); break;
2213 case NE_EXPR: val = (cmp != 0); break;
2214 case LT_EXPR: val = (cmp <= 0); break;
2215 case GT_EXPR: val = (cmp >= 0); break;
2216 case LE_EXPR: val = (cmp < 0); break;
2217 case GE_EXPR: val = (cmp > 0); break;
2218 default:
2219 val = false;
2221 if (val)
2222 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2225 /* Chose the more restrictive of two < or <= comparisons. */
2226 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2227 && (code2 == LT_EXPR || code2 == LE_EXPR))
2229 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2230 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2231 else
2232 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2235 /* Likewise chose the more restrictive of two > or >= comparisons. */
2236 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2237 && (code2 == GT_EXPR || code2 == GE_EXPR))
2239 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2240 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2241 else
2242 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2245 /* Check for singleton ranges. */
2246 else if (cmp == 0
2247 && ((code1 == LE_EXPR && code2 == GE_EXPR)
2248 || (code1 == GE_EXPR && code2 == LE_EXPR)))
2249 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
2251 /* Check for disjoint ranges. */
2252 else if (cmp <= 0
2253 && (code1 == LT_EXPR || code1 == LE_EXPR)
2254 && (code2 == GT_EXPR || code2 == GE_EXPR))
2255 return boolean_false_node;
2256 else if (cmp >= 0
2257 && (code1 == GT_EXPR || code1 == GE_EXPR)
2258 && (code2 == LT_EXPR || code2 == LE_EXPR))
2259 return boolean_false_node;
2262 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2263 NAME's definition is a truth value. See if there are any simplifications
2264 that can be done against the NAME's definition. */
2265 if (TREE_CODE (op1a) == SSA_NAME
2266 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2267 && (integer_zerop (op1b) || integer_onep (op1b)))
2269 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2270 || (code1 == NE_EXPR && integer_onep (op1b)));
2271 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2272 switch (gimple_code (stmt))
2274 case GIMPLE_ASSIGN:
2275 /* Try to simplify by copy-propagating the definition. */
2276 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
2278 case GIMPLE_PHI:
2279 /* If every argument to the PHI produces the same result when
2280 ANDed with the second comparison, we win.
2281 Do not do this unless the type is bool since we need a bool
2282 result here anyway. */
2283 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2285 tree result = NULL_TREE;
2286 unsigned i;
2287 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2289 tree arg = gimple_phi_arg_def (stmt, i);
2291 /* If this PHI has itself as an argument, ignore it.
2292 If all the other args produce the same result,
2293 we're still OK. */
2294 if (arg == gimple_phi_result (stmt))
2295 continue;
2296 else if (TREE_CODE (arg) == INTEGER_CST)
2298 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
2300 if (!result)
2301 result = boolean_false_node;
2302 else if (!integer_zerop (result))
2303 return NULL_TREE;
2305 else if (!result)
2306 result = fold_build2 (code2, boolean_type_node,
2307 op2a, op2b);
2308 else if (!same_bool_comparison_p (result,
2309 code2, op2a, op2b))
2310 return NULL_TREE;
2312 else if (TREE_CODE (arg) == SSA_NAME)
2314 tree temp = and_var_with_comparison (arg, invert,
2315 code2, op2a, op2b);
2316 if (!temp)
2317 return NULL_TREE;
2318 else if (!result)
2319 result = temp;
2320 else if (!same_bool_result_p (result, temp))
2321 return NULL_TREE;
2323 else
2324 return NULL_TREE;
2326 return result;
2329 default:
2330 break;
2333 return NULL_TREE;
2336 /* Try to simplify the AND of two comparisons, specified by
2337 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2338 If this can be simplified to a single expression (without requiring
2339 introducing more SSA variables to hold intermediate values),
2340 return the resulting tree. Otherwise return NULL_TREE.
2341 If the result expression is non-null, it has boolean type. */
2343 tree
2344 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2345 enum tree_code code2, tree op2a, tree op2b)
2347 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2348 if (t)
2349 return t;
2350 else
2351 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2354 /* Helper function for or_comparisons_1: try to simplify the OR of the
2355 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2356 If INVERT is true, invert the value of VAR before doing the OR.
2357 Return NULL_EXPR if we can't simplify this to a single expression. */
2359 static tree
2360 or_var_with_comparison (tree var, bool invert,
2361 enum tree_code code2, tree op2a, tree op2b)
2363 tree t;
2364 gimple stmt = SSA_NAME_DEF_STMT (var);
2366 /* We can only deal with variables whose definitions are assignments. */
2367 if (!is_gimple_assign (stmt))
2368 return NULL_TREE;
2370 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2371 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2372 Then we only have to consider the simpler non-inverted cases. */
2373 if (invert)
2374 t = and_var_with_comparison_1 (stmt,
2375 invert_tree_comparison (code2, false),
2376 op2a, op2b);
2377 else
2378 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2379 return canonicalize_bool (t, invert);
2382 /* Try to simplify the OR of the ssa variable defined by the assignment
2383 STMT with the comparison specified by (OP2A CODE2 OP2B).
2384 Return NULL_EXPR if we can't simplify this to a single expression. */
2386 static tree
2387 or_var_with_comparison_1 (gimple stmt,
2388 enum tree_code code2, tree op2a, tree op2b)
2390 tree var = gimple_assign_lhs (stmt);
2391 tree true_test_var = NULL_TREE;
2392 tree false_test_var = NULL_TREE;
2393 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2395 /* Check for identities like (var OR (var != 0)) => true . */
2396 if (TREE_CODE (op2a) == SSA_NAME
2397 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2399 if ((code2 == NE_EXPR && integer_zerop (op2b))
2400 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2402 true_test_var = op2a;
2403 if (var == true_test_var)
2404 return var;
2406 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2407 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2409 false_test_var = op2a;
2410 if (var == false_test_var)
2411 return boolean_true_node;
2415 /* If the definition is a comparison, recurse on it. */
2416 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2418 tree t = or_comparisons_1 (innercode,
2419 gimple_assign_rhs1 (stmt),
2420 gimple_assign_rhs2 (stmt),
2421 code2,
2422 op2a,
2423 op2b);
2424 if (t)
2425 return t;
2428 /* If the definition is an AND or OR expression, we may be able to
2429 simplify by reassociating. */
2430 if (innercode == TRUTH_AND_EXPR
2431 || innercode == TRUTH_OR_EXPR
2432 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2433 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
2435 tree inner1 = gimple_assign_rhs1 (stmt);
2436 tree inner2 = gimple_assign_rhs2 (stmt);
2437 gimple s;
2438 tree t;
2439 tree partial = NULL_TREE;
2440 bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
2442 /* Check for boolean identities that don't require recursive examination
2443 of inner1/inner2:
2444 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2445 inner1 OR (inner1 AND inner2) => inner1
2446 !inner1 OR (inner1 OR inner2) => true
2447 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2449 if (inner1 == true_test_var)
2450 return (is_or ? var : inner1);
2451 else if (inner2 == true_test_var)
2452 return (is_or ? var : inner2);
2453 else if (inner1 == false_test_var)
2454 return (is_or
2455 ? boolean_true_node
2456 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2457 else if (inner2 == false_test_var)
2458 return (is_or
2459 ? boolean_true_node
2460 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2462 /* Next, redistribute/reassociate the OR across the inner tests.
2463 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2464 if (TREE_CODE (inner1) == SSA_NAME
2465 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2466 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2467 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2468 gimple_assign_rhs1 (s),
2469 gimple_assign_rhs2 (s),
2470 code2, op2a, op2b)))
2472 /* Handle the OR case, where we are reassociating:
2473 (inner1 OR inner2) OR (op2a code2 op2b)
2474 => (t OR inner2)
2475 If the partial result t is a constant, we win. Otherwise
2476 continue on to try reassociating with the other inner test. */
2477 if (is_or)
2479 if (integer_onep (t))
2480 return boolean_true_node;
2481 else if (integer_zerop (t))
2482 return inner2;
2485 /* Handle the AND case, where we are redistributing:
2486 (inner1 AND inner2) OR (op2a code2 op2b)
2487 => (t AND (inner2 OR (op2a code op2b))) */
2488 else if (integer_zerop (t))
2489 return boolean_false_node;
2491 /* Save partial result for later. */
2492 partial = t;
2495 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2496 if (TREE_CODE (inner2) == SSA_NAME
2497 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2498 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2499 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2500 gimple_assign_rhs1 (s),
2501 gimple_assign_rhs2 (s),
2502 code2, op2a, op2b)))
2504 /* Handle the OR case, where we are reassociating:
2505 (inner1 OR inner2) OR (op2a code2 op2b)
2506 => (inner1 OR t)
2507 => (t OR partial) */
2508 if (is_or)
2510 if (integer_zerop (t))
2511 return inner1;
2512 else if (integer_onep (t))
2513 return boolean_true_node;
2514 /* If both are the same, we can apply the identity
2515 (x OR x) == x. */
2516 else if (partial && same_bool_result_p (t, partial))
2517 return t;
2520 /* Handle the AND case, where we are redistributing:
2521 (inner1 AND inner2) OR (op2a code2 op2b)
2522 => (t AND (inner1 OR (op2a code2 op2b)))
2523 => (t AND partial) */
2524 else
2526 if (integer_zerop (t))
2527 return boolean_false_node;
2528 else if (partial)
2530 /* We already got a simplification for the other
2531 operand to the redistributed AND expression. The
2532 interesting case is when at least one is true.
2533 Or, if both are the same, we can apply the identity
2534 (x AND x) == x. */
2535 if (integer_onep (partial))
2536 return t;
2537 else if (integer_onep (t))
2538 return partial;
2539 else if (same_bool_result_p (t, partial))
2540 return t;
2545 return NULL_TREE;
2548 /* Try to simplify the OR of two comparisons defined by
2549 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2550 If this can be done without constructing an intermediate value,
2551 return the resulting tree; otherwise NULL_TREE is returned.
2552 This function is deliberately asymmetric as it recurses on SSA_DEFs
2553 in the first comparison but not the second. */
2555 static tree
2556 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2557 enum tree_code code2, tree op2a, tree op2b)
2559 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2560 if (operand_equal_p (op1a, op2a, 0)
2561 && operand_equal_p (op1b, op2b, 0))
2563 tree t = combine_comparisons (UNKNOWN_LOCATION,
2564 TRUTH_ORIF_EXPR, code1, code2,
2565 boolean_type_node, op1a, op1b);
2566 if (t)
2567 return t;
2570 /* Likewise the swapped case of the above. */
2571 if (operand_equal_p (op1a, op2b, 0)
2572 && operand_equal_p (op1b, op2a, 0))
2574 tree t = combine_comparisons (UNKNOWN_LOCATION,
2575 TRUTH_ORIF_EXPR, code1,
2576 swap_tree_comparison (code2),
2577 boolean_type_node, op1a, op1b);
2578 if (t)
2579 return t;
2582 /* If both comparisons are of the same value against constants, we might
2583 be able to merge them. */
2584 if (operand_equal_p (op1a, op2a, 0)
2585 && TREE_CODE (op1b) == INTEGER_CST
2586 && TREE_CODE (op2b) == INTEGER_CST)
2588 int cmp = tree_int_cst_compare (op1b, op2b);
2590 /* If we have (op1a != op1b), we should either be able to
2591 return that or TRUE, depending on whether the constant op1b
2592 also satisfies the other comparison against op2b. */
2593 if (code1 == NE_EXPR)
2595 bool done = true;
2596 bool val;
2597 switch (code2)
2599 case EQ_EXPR: val = (cmp == 0); break;
2600 case NE_EXPR: val = (cmp != 0); break;
2601 case LT_EXPR: val = (cmp < 0); break;
2602 case GT_EXPR: val = (cmp > 0); break;
2603 case LE_EXPR: val = (cmp <= 0); break;
2604 case GE_EXPR: val = (cmp >= 0); break;
2605 default: done = false;
2607 if (done)
2609 if (val)
2610 return boolean_true_node;
2611 else
2612 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2615 /* Likewise if the second comparison is a != comparison. */
2616 else if (code2 == NE_EXPR)
2618 bool done = true;
2619 bool val;
2620 switch (code1)
2622 case EQ_EXPR: val = (cmp == 0); break;
2623 case NE_EXPR: val = (cmp != 0); break;
2624 case LT_EXPR: val = (cmp > 0); break;
2625 case GT_EXPR: val = (cmp < 0); break;
2626 case LE_EXPR: val = (cmp >= 0); break;
2627 case GE_EXPR: val = (cmp <= 0); break;
2628 default: done = false;
2630 if (done)
2632 if (val)
2633 return boolean_true_node;
2634 else
2635 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2639 /* See if an equality test is redundant with the other comparison. */
2640 else if (code1 == EQ_EXPR)
2642 bool val;
2643 switch (code2)
2645 case EQ_EXPR: val = (cmp == 0); break;
2646 case NE_EXPR: val = (cmp != 0); break;
2647 case LT_EXPR: val = (cmp < 0); break;
2648 case GT_EXPR: val = (cmp > 0); break;
2649 case LE_EXPR: val = (cmp <= 0); break;
2650 case GE_EXPR: val = (cmp >= 0); break;
2651 default:
2652 val = false;
2654 if (val)
2655 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2657 else if (code2 == EQ_EXPR)
2659 bool val;
2660 switch (code1)
2662 case EQ_EXPR: val = (cmp == 0); break;
2663 case NE_EXPR: val = (cmp != 0); break;
2664 case LT_EXPR: val = (cmp > 0); break;
2665 case GT_EXPR: val = (cmp < 0); break;
2666 case LE_EXPR: val = (cmp >= 0); break;
2667 case GE_EXPR: val = (cmp <= 0); break;
2668 default:
2669 val = false;
2671 if (val)
2672 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2675 /* Chose the less restrictive of two < or <= comparisons. */
2676 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2677 && (code2 == LT_EXPR || code2 == LE_EXPR))
2679 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2680 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2681 else
2682 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2685 /* Likewise chose the less restrictive of two > or >= comparisons. */
2686 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2687 && (code2 == GT_EXPR || code2 == GE_EXPR))
2689 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2690 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2691 else
2692 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2695 /* Check for singleton ranges. */
2696 else if (cmp == 0
2697 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2698 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2699 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2701 /* Check for less/greater pairs that don't restrict the range at all. */
2702 else if (cmp >= 0
2703 && (code1 == LT_EXPR || code1 == LE_EXPR)
2704 && (code2 == GT_EXPR || code2 == GE_EXPR))
2705 return boolean_true_node;
2706 else if (cmp <= 0
2707 && (code1 == GT_EXPR || code1 == GE_EXPR)
2708 && (code2 == LT_EXPR || code2 == LE_EXPR))
2709 return boolean_true_node;
2712 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2713 NAME's definition is a truth value. See if there are any simplifications
2714 that can be done against the NAME's definition. */
2715 if (TREE_CODE (op1a) == SSA_NAME
2716 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2717 && (integer_zerop (op1b) || integer_onep (op1b)))
2719 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2720 || (code1 == NE_EXPR && integer_onep (op1b)));
2721 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2722 switch (gimple_code (stmt))
2724 case GIMPLE_ASSIGN:
2725 /* Try to simplify by copy-propagating the definition. */
2726 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2728 case GIMPLE_PHI:
2729 /* If every argument to the PHI produces the same result when
2730 ORed with the second comparison, we win.
2731 Do not do this unless the type is bool since we need a bool
2732 result here anyway. */
2733 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2735 tree result = NULL_TREE;
2736 unsigned i;
2737 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2739 tree arg = gimple_phi_arg_def (stmt, i);
2741 /* If this PHI has itself as an argument, ignore it.
2742 If all the other args produce the same result,
2743 we're still OK. */
2744 if (arg == gimple_phi_result (stmt))
2745 continue;
2746 else if (TREE_CODE (arg) == INTEGER_CST)
2748 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2750 if (!result)
2751 result = boolean_true_node;
2752 else if (!integer_onep (result))
2753 return NULL_TREE;
2755 else if (!result)
2756 result = fold_build2 (code2, boolean_type_node,
2757 op2a, op2b);
2758 else if (!same_bool_comparison_p (result,
2759 code2, op2a, op2b))
2760 return NULL_TREE;
2762 else if (TREE_CODE (arg) == SSA_NAME)
2764 tree temp = or_var_with_comparison (arg, invert,
2765 code2, op2a, op2b);
2766 if (!temp)
2767 return NULL_TREE;
2768 else if (!result)
2769 result = temp;
2770 else if (!same_bool_result_p (result, temp))
2771 return NULL_TREE;
2773 else
2774 return NULL_TREE;
2776 return result;
2779 default:
2780 break;
2783 return NULL_TREE;
2786 /* Try to simplify the OR of two comparisons, specified by
2787 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2788 If this can be simplified to a single expression (without requiring
2789 introducing more SSA variables to hold intermediate values),
2790 return the resulting tree. Otherwise return NULL_TREE.
2791 If the result expression is non-null, it has boolean type. */
2793 tree
2794 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2795 enum tree_code code2, tree op2a, tree op2b)
2797 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2798 if (t)
2799 return t;
2800 else
2801 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);