Merge from trunk: 215733-215743
[official-gcc.git] / gcc-4_6_3-mobile / gcc / gimple-fold.c
blobad45dcb1ad96dfc6815cf29ec1142c17a45d818c
/* Statement simplification on GIMPLE.
   Copyright (C) 2010, 2011 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
32 #include "target.h"
34 /* Return true when DECL can be referenced from current unit.
35 We can get declarations that are not possible to reference for
36 various reasons:
38 1) When analyzing C++ virtual tables.
39 C++ virtual tables do have known constructors even
40 when they are keyed to other compilation unit.
41 Those tables can contain pointers to methods and vars
42 in other units. Those methods have both STATIC and EXTERNAL
43 set.
44 2) In WHOPR mode devirtualization might lead to reference
45 to method that was partitioned elsehwere.
46 In this case we have static VAR_DECL or FUNCTION_DECL
47 that has no corresponding callgraph/varpool node
48 declaring the body.
49 3) COMDAT functions referred by external vtables that
50 we devirtualize only during final copmilation stage.
51 At this time we already decided that we will not output
52 the function body and thus we can't reference the symbol
53 directly. */
55 static bool
56 can_refer_decl_in_current_unit_p (tree decl)
58 struct varpool_node *vnode;
59 struct cgraph_node *node;
61 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
62 return true;
63 /* External flag is set, so we deal with C++ reference
64 to static object from other file. */
65 if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
66 && TREE_CODE (decl) == VAR_DECL)
68 /* Just be sure it is not big in frontend setting
69 flags incorrectly. Those variables should never
70 be finalized. */
71 gcc_checking_assert (!(vnode = varpool_get_node (decl))
72 || !vnode->finalized);
73 return false;
75 /* When function is public, we always can introduce new reference.
76 Exception are the COMDAT functions where introducing a direct
77 reference imply need to include function body in the curren tunit. */
78 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
79 return true;
80 /* We are not at ltrans stage; so don't worry about WHOPR.
81 Also when still gimplifying all referred comdat functions will be
82 produced. */
83 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
84 return true;
85 /* If we already output the function body, we are safe. */
86 if (TREE_ASM_WRITTEN (decl))
87 return true;
88 if (TREE_CODE (decl) == FUNCTION_DECL)
90 node = cgraph_get_node (decl);
91 /* Check that we still have function body and that we didn't took
92 the decision to eliminate offline copy of the function yet.
93 The second is important when devirtualization happens during final
94 compilation stage when making a new reference no longer makes callee
95 to be compiled. */
96 if (!node || !node->analyzed || node->global.inlined_to)
97 return false;
99 else if (TREE_CODE (decl) == VAR_DECL)
101 vnode = varpool_get_node (decl);
102 if (!vnode || !vnode->finalized)
103 return false;
105 return true;
108 /* CVAL is value taken from DECL_INITIAL of variable. Try to transorm it into
109 acceptable form for is_gimple_min_invariant. */
111 tree
112 canonicalize_constructor_val (tree cval)
114 STRIP_NOPS (cval);
115 if (TREE_CODE (cval) == POINTER_PLUS_EXPR)
117 tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval),
118 TREE_OPERAND (cval, 0),
119 TREE_OPERAND (cval, 1),
120 TREE_TYPE (cval));
121 if (t)
122 cval = t;
124 if (TREE_CODE (cval) == ADDR_EXPR)
126 tree base = get_base_address (TREE_OPERAND (cval, 0));
128 if (base
129 && (TREE_CODE (base) == VAR_DECL
130 || TREE_CODE (base) == FUNCTION_DECL)
131 && !can_refer_decl_in_current_unit_p (base))
132 return NULL_TREE;
133 if (base && TREE_CODE (base) == VAR_DECL)
134 add_referenced_var (base);
135 /* We never have the chance to fixup types in global initializers
136 during gimplification. Do so here. */
137 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
138 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
140 return cval;
143 /* If SYM is a constant variable with known value, return the value.
144 NULL_TREE is returned otherwise. */
146 tree
147 get_symbol_constant_value (tree sym)
149 if (const_value_known_p (sym))
151 tree val = DECL_INITIAL (sym);
152 if (val)
154 val = canonicalize_constructor_val (val);
155 if (val && is_gimple_min_invariant (val))
156 return val;
157 else
158 return NULL_TREE;
160 /* Variables declared 'const' without an initializer
161 have zero as the initializer if they may not be
162 overridden at link or run time. */
163 if (!val
164 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
165 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
166 return build_zero_cst (TREE_TYPE (sym));
169 return NULL_TREE;
173 /* Return true if we may propagate the address expression ADDR into the
174 dereference DEREF and cancel them. */
176 bool
177 may_propagate_address_into_dereference (tree addr, tree deref)
179 gcc_assert (TREE_CODE (deref) == MEM_REF
180 && TREE_CODE (addr) == ADDR_EXPR);
182 /* Don't propagate if ADDR's operand has incomplete type. */
183 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
184 return false;
186 /* If the address is invariant then we do not need to preserve restrict
187 qualifications. But we do need to preserve volatile qualifiers until
188 we can annotate the folded dereference itself properly. */
189 if (is_gimple_min_invariant (addr)
190 && (!TREE_THIS_VOLATILE (deref)
191 || TYPE_VOLATILE (TREE_TYPE (addr))))
192 return useless_type_conversion_p (TREE_TYPE (deref),
193 TREE_TYPE (TREE_OPERAND (addr, 0)));
195 /* Else both the address substitution and the folding must result in
196 a valid useless type conversion sequence. */
197 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
198 TREE_TYPE (addr))
199 && useless_type_conversion_p (TREE_TYPE (deref),
200 TREE_TYPE (TREE_OPERAND (addr, 0))));
204 /* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
205 BASE is an array type. OFFSET is a byte displacement.
207 LOC is the location of the original expression. */
209 static tree
210 maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
212 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
213 tree array_type, elt_type, elt_size;
214 tree domain_type;
216 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
217 measured in units of the size of elements type) from that ARRAY_REF).
218 We can't do anything if either is variable.
220 The case we handle here is *(&A[N]+O). */
221 if (TREE_CODE (base) == ARRAY_REF)
223 tree low_bound = array_ref_low_bound (base);
225 elt_offset = TREE_OPERAND (base, 1);
226 if (TREE_CODE (low_bound) != INTEGER_CST
227 || TREE_CODE (elt_offset) != INTEGER_CST)
228 return NULL_TREE;
230 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
231 base = TREE_OPERAND (base, 0);
234 /* Ignore stupid user tricks of indexing non-array variables. */
235 array_type = TREE_TYPE (base);
236 if (TREE_CODE (array_type) != ARRAY_TYPE)
237 return NULL_TREE;
238 elt_type = TREE_TYPE (array_type);
240 /* Use signed size type for intermediate computation on the index. */
241 idx_type = ssizetype;
243 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
244 element type (so we can use the alignment if it's not constant).
245 Otherwise, compute the offset as an index by using a division. If the
246 division isn't exact, then don't do anything. */
247 elt_size = TYPE_SIZE_UNIT (elt_type);
248 if (!elt_size)
249 return NULL;
250 if (integer_zerop (offset))
252 if (TREE_CODE (elt_size) != INTEGER_CST)
253 elt_size = size_int (TYPE_ALIGN (elt_type));
255 idx = build_int_cst (idx_type, 0);
257 else
259 unsigned HOST_WIDE_INT lquo, lrem;
260 HOST_WIDE_INT hquo, hrem;
261 double_int soffset;
263 /* The final array offset should be signed, so we need
264 to sign-extend the (possibly pointer) offset here
265 and use signed division. */
266 soffset = double_int_sext (tree_to_double_int (offset),
267 TYPE_PRECISION (TREE_TYPE (offset)));
268 if (TREE_CODE (elt_size) != INTEGER_CST
269 || div_and_round_double (TRUNC_DIV_EXPR, 0,
270 soffset.low, soffset.high,
271 TREE_INT_CST_LOW (elt_size),
272 TREE_INT_CST_HIGH (elt_size),
273 &lquo, &hquo, &lrem, &hrem)
274 || lrem || hrem)
275 return NULL_TREE;
277 idx = build_int_cst_wide (idx_type, lquo, hquo);
280 /* Assume the low bound is zero. If there is a domain type, get the
281 low bound, if any, convert the index into that type, and add the
282 low bound. */
283 min_idx = build_int_cst (idx_type, 0);
284 domain_type = TYPE_DOMAIN (array_type);
285 if (domain_type)
287 idx_type = domain_type;
288 if (TYPE_MIN_VALUE (idx_type))
289 min_idx = TYPE_MIN_VALUE (idx_type);
290 else
291 min_idx = fold_convert (idx_type, min_idx);
293 if (TREE_CODE (min_idx) != INTEGER_CST)
294 return NULL_TREE;
296 elt_offset = fold_convert (idx_type, elt_offset);
299 if (!integer_zerop (min_idx))
300 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
301 if (!integer_zerop (elt_offset))
302 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
304 /* Make sure to possibly truncate late after offsetting. */
305 idx = fold_convert (idx_type, idx);
307 /* We don't want to construct access past array bounds. For example
308 char *(c[4]);
309 c[3][2];
310 should not be simplified into (*c)[14] or tree-vrp will
311 give false warnings.
312 This is only an issue for multi-dimensional arrays. */
313 if (TREE_CODE (elt_type) == ARRAY_TYPE
314 && domain_type)
316 if (TYPE_MAX_VALUE (domain_type)
317 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
318 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
319 return NULL_TREE;
320 else if (TYPE_MIN_VALUE (domain_type)
321 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
322 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
323 return NULL_TREE;
324 else if (compare_tree_int (idx, 0) < 0)
325 return NULL_TREE;
329 tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
330 SET_EXPR_LOCATION (t, loc);
331 return t;
336 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
337 LOC is the location of original expression.
339 Before attempting the conversion strip off existing ADDR_EXPRs. */
341 tree
342 maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
343 tree orig_type)
345 tree ret;
347 STRIP_NOPS (base);
348 if (TREE_CODE (base) != ADDR_EXPR)
349 return NULL_TREE;
351 base = TREE_OPERAND (base, 0);
352 if (types_compatible_p (orig_type, TREE_TYPE (base))
353 && integer_zerop (offset))
354 return base;
356 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
357 if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
358 return ret;
359 return NULL_TREE;
362 /* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
363 LOC is the location of the original expression. */
365 tree
366 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
367 tree orig_type)
369 tree base, ret;
371 STRIP_NOPS (addr);
372 if (TREE_CODE (addr) != ADDR_EXPR)
373 return NULL_TREE;
374 base = TREE_OPERAND (addr, 0);
375 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
376 if (ret)
378 ret = build_fold_addr_expr (ret);
379 if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
380 return NULL_TREE;
381 SET_EXPR_LOCATION (ret, loc);
384 return ret;
388 /* A quaint feature extant in our address arithmetic is that there
389 can be hidden type changes here. The type of the result need
390 not be the same as the type of the input pointer.
392 What we're after here is an expression of the form
393 (T *)(&array + const)
394 where array is OP0, const is OP1, RES_TYPE is T and
395 the cast doesn't actually exist, but is implicit in the
396 type of the POINTER_PLUS_EXPR. We'd like to turn this into
397 &array[x]
398 which may be able to propagate further. */
400 tree
401 maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
403 tree ptd_type;
404 tree t;
406 /* The first operand should be an ADDR_EXPR. */
407 if (TREE_CODE (op0) != ADDR_EXPR)
408 return NULL_TREE;
409 op0 = TREE_OPERAND (op0, 0);
411 /* It had better be a constant. */
412 if (TREE_CODE (op1) != INTEGER_CST)
414 /* Or op0 should now be A[0] and the non-constant offset defined
415 via a multiplication by the array element size. */
416 if (TREE_CODE (op0) == ARRAY_REF
417 /* As we will end up creating a variable index array access
418 in the outermost array dimension make sure there isn't
419 a more inner array that the index could overflow to. */
420 && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
421 && integer_zerop (TREE_OPERAND (op0, 1))
422 && TREE_CODE (op1) == SSA_NAME)
424 gimple offset_def = SSA_NAME_DEF_STMT (op1);
425 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
426 if (!host_integerp (elsz, 1)
427 || !is_gimple_assign (offset_def))
428 return NULL_TREE;
430 /* Do not build array references of something that we can't
431 see the true number of array dimensions for. */
432 if (!DECL_P (TREE_OPERAND (op0, 0))
433 && !handled_component_p (TREE_OPERAND (op0, 0)))
434 return NULL_TREE;
436 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
437 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
438 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
439 return build_fold_addr_expr
440 (build4 (ARRAY_REF, TREE_TYPE (op0),
441 TREE_OPERAND (op0, 0),
442 gimple_assign_rhs1 (offset_def),
443 TREE_OPERAND (op0, 2),
444 TREE_OPERAND (op0, 3)));
445 else if (integer_onep (elsz)
446 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
447 return build_fold_addr_expr
448 (build4 (ARRAY_REF, TREE_TYPE (op0),
449 TREE_OPERAND (op0, 0),
450 op1,
451 TREE_OPERAND (op0, 2),
452 TREE_OPERAND (op0, 3)));
454 else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
455 /* Dto. */
456 && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
457 && TREE_CODE (op1) == SSA_NAME)
459 gimple offset_def = SSA_NAME_DEF_STMT (op1);
460 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
461 if (!host_integerp (elsz, 1)
462 || !is_gimple_assign (offset_def))
463 return NULL_TREE;
465 /* Do not build array references of something that we can't
466 see the true number of array dimensions for. */
467 if (!DECL_P (op0)
468 && !handled_component_p (op0))
469 return NULL_TREE;
471 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
472 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
473 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
474 return build_fold_addr_expr
475 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
476 op0, gimple_assign_rhs1 (offset_def),
477 integer_zero_node, NULL_TREE));
478 else if (integer_onep (elsz)
479 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
480 return build_fold_addr_expr
481 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
482 op0, op1,
483 integer_zero_node, NULL_TREE));
486 return NULL_TREE;
489 /* If the first operand is an ARRAY_REF, expand it so that we can fold
490 the offset into it. */
491 while (TREE_CODE (op0) == ARRAY_REF)
493 tree array_obj = TREE_OPERAND (op0, 0);
494 tree array_idx = TREE_OPERAND (op0, 1);
495 tree elt_type = TREE_TYPE (op0);
496 tree elt_size = TYPE_SIZE_UNIT (elt_type);
497 tree min_idx;
499 if (TREE_CODE (array_idx) != INTEGER_CST)
500 break;
501 if (TREE_CODE (elt_size) != INTEGER_CST)
502 break;
504 /* Un-bias the index by the min index of the array type. */
505 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
506 if (min_idx)
508 min_idx = TYPE_MIN_VALUE (min_idx);
509 if (min_idx)
511 if (TREE_CODE (min_idx) != INTEGER_CST)
512 break;
514 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
515 if (!integer_zerop (min_idx))
516 array_idx = int_const_binop (MINUS_EXPR, array_idx,
517 min_idx, 0);
521 /* Convert the index to a byte offset. */
522 array_idx = fold_convert (sizetype, array_idx);
523 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
525 /* Update the operands for the next round, or for folding. */
526 op1 = int_const_binop (PLUS_EXPR,
527 array_idx, op1, 0);
528 op0 = array_obj;
531 ptd_type = TREE_TYPE (res_type);
532 /* If we want a pointer to void, reconstruct the reference from the
533 array element type. A pointer to that can be trivially converted
534 to void *. This happens as we fold (void *)(ptr p+ off). */
535 if (VOID_TYPE_P (ptd_type)
536 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
537 ptd_type = TREE_TYPE (TREE_TYPE (op0));
539 /* At which point we can try some of the same things as for indirects. */
540 t = maybe_fold_offset_to_array_ref (loc, op0, op1);
541 if (t)
543 t = build_fold_addr_expr (t);
544 if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
545 return NULL_TREE;
546 SET_EXPR_LOCATION (t, loc);
549 return t;
552 /* Subroutine of fold_stmt. We perform several simplifications of the
553 memory reference tree EXPR and make sure to re-gimplify them properly
554 after propagation of constant addresses. IS_LHS is true if the
555 reference is supposed to be an lvalue. */
557 static tree
558 maybe_fold_reference (tree expr, bool is_lhs)
560 tree *t = &expr;
561 tree result;
563 if (!is_lhs
564 && (result = fold_const_aggregate_ref (expr))
565 && is_gimple_min_invariant (result))
566 return result;
568 /* ??? We might want to open-code the relevant remaining cases
569 to avoid using the generic fold. */
570 if (handled_component_p (*t)
571 && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
573 tree tem = fold (*t);
574 if (tem != *t)
575 return tem;
578 while (handled_component_p (*t))
579 t = &TREE_OPERAND (*t, 0);
581 /* Fold back MEM_REFs to reference trees. */
582 if (TREE_CODE (*t) == MEM_REF
583 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
584 && integer_zerop (TREE_OPERAND (*t, 1))
585 && (TREE_THIS_VOLATILE (*t)
586 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
587 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
588 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
589 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
590 /* We have to look out here to not drop a required conversion
591 from the rhs to the lhs if is_lhs, but we don't have the
592 rhs here to verify that. Thus require strict type
593 compatibility. */
594 && types_compatible_p (TREE_TYPE (*t),
595 TREE_TYPE (TREE_OPERAND
596 (TREE_OPERAND (*t, 0), 0))))
598 tree tem;
599 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
600 tem = maybe_fold_reference (expr, is_lhs);
601 if (tem)
602 return tem;
603 return expr;
605 /* Canonicalize MEM_REFs invariant address operand. */
606 else if (TREE_CODE (*t) == MEM_REF
607 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
609 bool volatile_p = TREE_THIS_VOLATILE (*t);
610 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
611 TREE_OPERAND (*t, 0),
612 TREE_OPERAND (*t, 1));
613 if (tem)
615 TREE_THIS_VOLATILE (tem) = volatile_p;
616 *t = tem;
617 tem = maybe_fold_reference (expr, is_lhs);
618 if (tem)
619 return tem;
620 return expr;
623 else if (TREE_CODE (*t) == TARGET_MEM_REF)
625 tree tem = maybe_fold_tmr (*t);
626 if (tem)
628 *t = tem;
629 tem = maybe_fold_reference (expr, is_lhs);
630 if (tem)
631 return tem;
632 return expr;
635 else if (!is_lhs
636 && DECL_P (*t))
638 tree tem = get_symbol_constant_value (*t);
639 if (tem
640 && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
642 *t = unshare_expr (tem);
643 tem = maybe_fold_reference (expr, is_lhs);
644 if (tem)
645 return tem;
646 return expr;
650 return NULL_TREE;
654 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
655 replacement rhs for the statement or NULL_TREE if no simplification
656 could be made. It is assumed that the operands have been previously
657 folded. */
659 static tree
660 fold_gimple_assign (gimple_stmt_iterator *si)
662 gimple stmt = gsi_stmt (*si);
663 enum tree_code subcode = gimple_assign_rhs_code (stmt);
664 location_t loc = gimple_location (stmt);
666 tree result = NULL_TREE;
668 switch (get_gimple_rhs_class (subcode))
670 case GIMPLE_SINGLE_RHS:
672 tree rhs = gimple_assign_rhs1 (stmt);
674 /* Try to fold a conditional expression. */
675 if (TREE_CODE (rhs) == COND_EXPR)
677 tree op0 = COND_EXPR_COND (rhs);
678 tree tem;
679 bool set = false;
680 location_t cond_loc = EXPR_LOCATION (rhs);
682 if (COMPARISON_CLASS_P (op0))
684 fold_defer_overflow_warnings ();
685 tem = fold_binary_loc (cond_loc,
686 TREE_CODE (op0), TREE_TYPE (op0),
687 TREE_OPERAND (op0, 0),
688 TREE_OPERAND (op0, 1));
689 /* This is actually a conditional expression, not a GIMPLE
690 conditional statement, however, the valid_gimple_rhs_p
691 test still applies. */
692 set = (tem && is_gimple_condexpr (tem)
693 && valid_gimple_rhs_p (tem));
694 fold_undefer_overflow_warnings (set, stmt, 0);
696 else if (is_gimple_min_invariant (op0))
698 tem = op0;
699 set = true;
701 else
702 return NULL_TREE;
704 if (set)
705 result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
706 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
709 else if (REFERENCE_CLASS_P (rhs))
710 return maybe_fold_reference (rhs, false);
712 else if (TREE_CODE (rhs) == ADDR_EXPR)
714 tree ref = TREE_OPERAND (rhs, 0);
715 tree tem = maybe_fold_reference (ref, true);
716 if (tem
717 && TREE_CODE (tem) == MEM_REF
718 && integer_zerop (TREE_OPERAND (tem, 1)))
719 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
720 else if (tem)
721 result = fold_convert (TREE_TYPE (rhs),
722 build_fold_addr_expr_loc (loc, tem));
723 else if (TREE_CODE (ref) == MEM_REF
724 && integer_zerop (TREE_OPERAND (ref, 1)))
725 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
728 else if (TREE_CODE (rhs) == CONSTRUCTOR
729 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
730 && (CONSTRUCTOR_NELTS (rhs)
731 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
733 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
734 unsigned i;
735 tree val;
737 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
738 if (TREE_CODE (val) != INTEGER_CST
739 && TREE_CODE (val) != REAL_CST
740 && TREE_CODE (val) != FIXED_CST)
741 return NULL_TREE;
743 return build_vector_from_ctor (TREE_TYPE (rhs),
744 CONSTRUCTOR_ELTS (rhs));
747 else if (DECL_P (rhs))
748 return unshare_expr (get_symbol_constant_value (rhs));
750 /* If we couldn't fold the RHS, hand over to the generic
751 fold routines. */
752 if (result == NULL_TREE)
753 result = fold (rhs);
755 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
756 that may have been added by fold, and "useless" type
757 conversions that might now be apparent due to propagation. */
758 STRIP_USELESS_TYPE_CONVERSION (result);
760 if (result != rhs && valid_gimple_rhs_p (result))
761 return result;
763 return NULL_TREE;
765 break;
767 case GIMPLE_UNARY_RHS:
769 tree rhs = gimple_assign_rhs1 (stmt);
771 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
772 if (result)
774 /* If the operation was a conversion do _not_ mark a
775 resulting constant with TREE_OVERFLOW if the original
776 constant was not. These conversions have implementation
777 defined behavior and retaining the TREE_OVERFLOW flag
778 here would confuse later passes such as VRP. */
779 if (CONVERT_EXPR_CODE_P (subcode)
780 && TREE_CODE (result) == INTEGER_CST
781 && TREE_CODE (rhs) == INTEGER_CST)
782 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
784 STRIP_USELESS_TYPE_CONVERSION (result);
785 if (valid_gimple_rhs_p (result))
786 return result;
788 else if (CONVERT_EXPR_CODE_P (subcode)
789 && POINTER_TYPE_P (gimple_expr_type (stmt))
790 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
792 tree type = gimple_expr_type (stmt);
793 tree t = maybe_fold_offset_to_address (loc,
794 gimple_assign_rhs1 (stmt),
795 integer_zero_node, type);
796 if (t)
797 return t;
800 break;
802 case GIMPLE_BINARY_RHS:
803 /* Try to fold pointer addition. */
804 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
806 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
807 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
809 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
810 if (!useless_type_conversion_p
811 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
812 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
814 result = maybe_fold_stmt_addition (gimple_location (stmt),
815 type,
816 gimple_assign_rhs1 (stmt),
817 gimple_assign_rhs2 (stmt));
820 if (!result)
821 result = fold_binary_loc (loc, subcode,
822 TREE_TYPE (gimple_assign_lhs (stmt)),
823 gimple_assign_rhs1 (stmt),
824 gimple_assign_rhs2 (stmt));
826 if (result)
828 STRIP_USELESS_TYPE_CONVERSION (result);
829 if (valid_gimple_rhs_p (result))
830 return result;
832 /* Fold might have produced non-GIMPLE, so if we trust it blindly
833 we lose canonicalization opportunities. Do not go again
834 through fold here though, or the same non-GIMPLE will be
835 produced. */
836 if (commutative_tree_code (subcode)
837 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
838 gimple_assign_rhs2 (stmt), false))
839 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
840 gimple_assign_rhs2 (stmt),
841 gimple_assign_rhs1 (stmt));
843 break;
845 case GIMPLE_TERNARY_RHS:
846 result = fold_ternary_loc (loc, subcode,
847 TREE_TYPE (gimple_assign_lhs (stmt)),
848 gimple_assign_rhs1 (stmt),
849 gimple_assign_rhs2 (stmt),
850 gimple_assign_rhs3 (stmt));
852 if (result)
854 STRIP_USELESS_TYPE_CONVERSION (result);
855 if (valid_gimple_rhs_p (result))
856 return result;
858 /* Fold might have produced non-GIMPLE, so if we trust it blindly
859 we lose canonicalization opportunities. Do not go again
860 through fold here though, or the same non-GIMPLE will be
861 produced. */
862 if (commutative_ternary_tree_code (subcode)
863 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
864 gimple_assign_rhs2 (stmt), false))
865 return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
866 gimple_assign_rhs2 (stmt),
867 gimple_assign_rhs1 (stmt),
868 gimple_assign_rhs3 (stmt));
870 break;
872 case GIMPLE_INVALID_RHS:
873 gcc_unreachable ();
876 return NULL_TREE;
879 /* Attempt to fold a conditional statement. Return true if any changes were
880 made. We only attempt to fold the condition expression, and do not perform
881 any transformation that would require alteration of the cfg. It is
882 assumed that the operands have been previously folded. */
884 static bool
885 fold_gimple_cond (gimple stmt)
887 tree result = fold_binary_loc (gimple_location (stmt),
888 gimple_cond_code (stmt),
889 boolean_type_node,
890 gimple_cond_lhs (stmt),
891 gimple_cond_rhs (stmt));
893 if (result)
895 STRIP_USELESS_TYPE_CONVERSION (result);
896 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
898 gimple_cond_set_condition_from_tree (stmt, result);
899 return true;
903 return false;
906 /* Convert EXPR into a GIMPLE value suitable for substitution on the
907 RHS of an assignment. Insert the necessary statements before
908 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
909 is replaced. If the call is expected to produces a result, then it
910 is replaced by an assignment of the new RHS to the result variable.
911 If the result is to be ignored, then the call is replaced by a
912 GIMPLE_NOP. A proper VDEF chain is retained by making the first
913 VUSE and the last VDEF of the whole sequence be the same as the replaced
914 statement and using new SSA names for stores in between. */
916 void
917 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
919 tree lhs;
920 tree tmp = NULL_TREE; /* Silence warning. */
921 gimple stmt, new_stmt;
922 gimple_stmt_iterator i;
923 gimple_seq stmts = gimple_seq_alloc();
924 struct gimplify_ctx gctx;
925 gimple last = NULL;
926 gimple laststore = NULL;
927 tree reaching_vuse;
929 stmt = gsi_stmt (*si_p);
931 gcc_assert (is_gimple_call (stmt));
933 lhs = gimple_call_lhs (stmt);
934 reaching_vuse = gimple_vuse (stmt);
936 push_gimplify_context (&gctx);
938 if (lhs == NULL_TREE)
940 gimplify_and_add (expr, &stmts);
941 /* We can end up with folding a memcpy of an empty class assignment
942 which gets optimized away by C++ gimplification. */
943 if (gimple_seq_empty_p (stmts))
945 pop_gimplify_context (NULL);
946 if (gimple_in_ssa_p (cfun))
948 unlink_stmt_vdef (stmt);
949 release_defs (stmt);
951 gsi_remove (si_p, true);
952 return;
955 else
956 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
958 pop_gimplify_context (NULL);
960 if (gimple_has_location (stmt))
961 annotate_all_with_location (stmts, gimple_location (stmt));
963 /* The replacement can expose previously unreferenced variables. */
964 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
966 if (last)
968 gsi_insert_before (si_p, last, GSI_NEW_STMT);
969 gsi_next (si_p);
971 new_stmt = gsi_stmt (i);
972 if (gimple_in_ssa_p (cfun))
974 find_new_referenced_vars (new_stmt);
975 mark_symbols_for_renaming (new_stmt);
977 /* If the new statement has a VUSE, update it with exact SSA name we
978 know will reach this one. */
979 if (gimple_vuse (new_stmt))
981 /* If we've also seen a previous store create a new VDEF for
982 the latter one, and make that the new reaching VUSE. */
983 if (laststore)
985 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
986 gimple_set_vdef (laststore, reaching_vuse);
987 update_stmt (laststore);
988 laststore = NULL;
990 gimple_set_vuse (new_stmt, reaching_vuse);
991 gimple_set_modified (new_stmt, true);
993 if (gimple_assign_single_p (new_stmt)
994 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
996 laststore = new_stmt;
998 last = new_stmt;
1001 if (lhs == NULL_TREE)
1003 /* If we replace a call without LHS that has a VDEF and our new
1004 sequence ends with a store we must make that store have the same
1005 vdef in order not to break the sequencing. This can happen
1006 for instance when folding memcpy calls into assignments. */
1007 if (gimple_vdef (stmt) && laststore)
1009 gimple_set_vdef (laststore, gimple_vdef (stmt));
1010 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1011 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
1012 update_stmt (laststore);
1014 else if (gimple_in_ssa_p (cfun))
1016 unlink_stmt_vdef (stmt);
1017 release_defs (stmt);
1019 new_stmt = last;
1021 else
1023 if (last)
1025 gsi_insert_before (si_p, last, GSI_NEW_STMT);
1026 gsi_next (si_p);
1028 if (laststore && is_gimple_reg (lhs))
1030 gimple_set_vdef (laststore, gimple_vdef (stmt));
1031 update_stmt (laststore);
1032 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1033 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
1034 laststore = NULL;
1036 else if (laststore)
1038 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
1039 gimple_set_vdef (laststore, reaching_vuse);
1040 update_stmt (laststore);
1041 laststore = NULL;
1043 new_stmt = gimple_build_assign (lhs, tmp);
1044 if (!is_gimple_reg (tmp))
1045 gimple_set_vuse (new_stmt, reaching_vuse);
1046 if (!is_gimple_reg (lhs))
1048 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1049 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1050 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
1052 else if (reaching_vuse == gimple_vuse (stmt))
1053 unlink_stmt_vdef (stmt);
1056 gimple_set_location (new_stmt, gimple_location (stmt));
1057 gsi_replace (si_p, new_stmt, false);
1060 /* Return the string length, maximum string length or maximum value of
1061    ARG in LENGTH.
1062    If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
1063    is not NULL and, for TYPE == 0, its value is not equal to the length
1064    we determine or if we are unable to determine the length or value,
1065    return false.  VISITED is a bitmap of visited variables.
1066    TYPE is 0 if string length should be returned, 1 for maximum string
1067    length and 2 for maximum value ARG can have.  */
1069 static bool
1070 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
1072   tree var, val;
1073   gimple def_stmt;
     /* Non-SSA arguments are resolved directly: COND_EXPR by recursing on
	both arms, &array[0] forms by looking through the address.  */
1075   if (TREE_CODE (arg) != SSA_NAME)
1077       if (TREE_CODE (arg) == COND_EXPR)
1078         return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
1079                && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
1080       /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
1081       else if (TREE_CODE (arg) == ADDR_EXPR
1082                && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1083                && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1085           tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1086           if (TREE_CODE (aop0) == INDIRECT_REF
1087               && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1088             return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1089                                       length, visited, type);
     /* TYPE == 2 asks for a maximum value: only a non-negative integer
	constant qualifies.  Otherwise compute the string length.  */
1092       if (type == 2)
1094           val = arg;
1095           if (TREE_CODE (val) != INTEGER_CST
1096               || tree_int_cst_sgn (val) < 0)
1097             return false;
1099       else
1100         val = c_strlen (arg, 1);
1101       if (!val)
1102         return false;
     /* Merge with a previously recorded length: for maxima keep the larger
	constant, for exact lengths (TYPE == 0) they must agree.  */
1104       if (*length)
1106           if (type > 0)
1108               if (TREE_CODE (*length) != INTEGER_CST
1109                   || TREE_CODE (val) != INTEGER_CST)
1110                 return false;
1112               if (tree_int_cst_lt (*length, val))
1113                 *length = val;
1114               return true;
1116           else if (simple_cst_equal (val, *length) != 1)
1117             return false;
1120       *length = val;
1121       return true;
1124   /* If we were already here, break the infinite cycle.  */
1125   if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
1126     return true;
1128   var = arg;
1129   def_stmt = SSA_NAME_DEF_STMT (var);
1131   switch (gimple_code (def_stmt))
1133     case GIMPLE_ASSIGN:
1134       /* The RHS of the statement defining VAR must either have a
1135          constant length or come from another SSA_NAME with a constant
1136          length.  */
1137       if (gimple_assign_single_p (def_stmt)
1138           || gimple_assign_unary_nop_p (def_stmt))
1140           tree rhs = gimple_assign_rhs1 (def_stmt);
1141           return get_maxval_strlen (rhs, length, visited, type);
1143       return false;
1145     case GIMPLE_PHI:
1147       /* All the arguments of the PHI node must have the same constant
1148          length.  */
1149       unsigned i;
1151       for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1153           tree arg = gimple_phi_arg (def_stmt, i)->def;
1155           /* If this PHI has itself as an argument, we cannot
1156              determine the string length of this argument.  However,
1157              if we can find a constant string length for the other
1158              PHI args then we can still be sure that this is a
1159              constant string length.  So be optimistic and just
1160              continue with the next argument.  */
1161           if (arg == gimple_phi_result (def_stmt))
1162             continue;
1164           if (!get_maxval_strlen (arg, length, visited, type))
1165             return false;
1168       return true;
1170     default:
1171       return false;
1176 /* Fold builtin call in statement STMT.  Returns a simplified tree.
1177    We may return a non-constant expression, including another call
1178    to a different function and with different arguments, e.g.,
1179    substituting memcpy for strcpy when the string length is known.
1180    Note that some builtins expand into inline code that may not
1181    be valid in GIMPLE.  Callers must take care.  */
1183 tree
1184 gimple_fold_builtin (gimple stmt)
1186   tree result, val[3];
1187   tree callee, a;
1188   int arg_idx, type;
1189   bitmap visited;
1190   bool ignore;
1191   int nargs;
1192   location_t loc = gimple_location (stmt);
1194   gcc_assert (is_gimple_call (stmt));
1196   ignore = (gimple_call_lhs (stmt) == NULL);
1198   /* First try the generic builtin folder.  If that succeeds, return the
1199      result directly.  */
1200   result = fold_call_stmt (stmt, ignore);
1201   if (result)
1203       if (ignore)
1204         STRIP_NOPS (result);
1205       return result;
1208   /* Ignore MD builtins.  */
1209   callee = gimple_call_fndecl (stmt);
1210   if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
1211     return NULL_TREE;
1213   /* Give up for always_inline inline builtins until they are
1214      inlined.  */
1215   if (avoid_folding_inline_builtin (callee))
1216     return NULL_TREE;
1218   /* If the builtin could not be folded, and it has no argument list,
1219      we're done.  */
1220   nargs = gimple_call_num_args (stmt);
1221   if (nargs == 0)
1222     return NULL_TREE;
1224   /* Limit the work only for builtins we know how to simplify.  */
     /* ARG_IDX selects which argument's length/value to query;
	TYPE is the query kind passed to get_maxval_strlen (0 = exact
	length, 1 = maximum length, 2 = maximum value).  */
1225   switch (DECL_FUNCTION_CODE (callee))
1227     case BUILT_IN_STRLEN:
1228     case BUILT_IN_FPUTS:
1229     case BUILT_IN_FPUTS_UNLOCKED:
1230       arg_idx = 0;
1231       type = 0;
1232       break;
1233     case BUILT_IN_STRCPY:
1234     case BUILT_IN_STRNCPY:
1235       arg_idx = 1;
1236       type = 0;
1237       break;
1238     case BUILT_IN_MEMCPY_CHK:
1239     case BUILT_IN_MEMPCPY_CHK:
1240     case BUILT_IN_MEMMOVE_CHK:
1241     case BUILT_IN_MEMSET_CHK:
1242     case BUILT_IN_STRNCPY_CHK:
1243       arg_idx = 2;
1244       type = 2;
1245       break;
1246     case BUILT_IN_STRCPY_CHK:
1247     case BUILT_IN_STPCPY_CHK:
1248       arg_idx = 1;
1249       type = 1;
1250       break;
1251     case BUILT_IN_SNPRINTF_CHK:
1252     case BUILT_IN_VSNPRINTF_CHK:
1253       arg_idx = 1;
1254       type = 2;
1255       break;
1256     default:
1257       return NULL_TREE;
1260   if (arg_idx >= nargs)
1261     return NULL_TREE;
1263   /* Try to use the dataflow information gathered by the CCP process.  */
1264   visited = BITMAP_ALLOC (NULL);
1265   bitmap_clear (visited);
1267   memset (val, 0, sizeof (val));
1268   a = gimple_call_arg (stmt, arg_idx);
1269   if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
1270     val[arg_idx] = NULL_TREE;
1272   BITMAP_FREE (visited);
1274   result = NULL_TREE;
     /* Second dispatch: hand the discovered length/value to the
	builtin-specific folder from fold-const/builtins.  */
1275   switch (DECL_FUNCTION_CODE (callee))
1277     case BUILT_IN_STRLEN:
1278       if (val[0] && nargs == 1)
1280           tree new_val =
1281               fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
1283           /* If the result is not a valid gimple value, or not a cast
1284              of a valid gimple value, then we cannot use the result.  */
1285           if (is_gimple_val (new_val)
1286               || (CONVERT_EXPR_P (new_val)
1287                   && is_gimple_val (TREE_OPERAND (new_val, 0))))
1288             return new_val;
1290       break;
1292     case BUILT_IN_STRCPY:
1293       if (val[1] && is_gimple_val (val[1]) && nargs == 2)
1294         result = fold_builtin_strcpy (loc, callee,
1295                                       gimple_call_arg (stmt, 0),
1296                                       gimple_call_arg (stmt, 1),
1297                                       val[1]);
1298       break;
1300     case BUILT_IN_STRNCPY:
1301       if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1302         result = fold_builtin_strncpy (loc, callee,
1303                                        gimple_call_arg (stmt, 0),
1304                                        gimple_call_arg (stmt, 1),
1305                                        gimple_call_arg (stmt, 2),
1306                                        val[1]);
1307       break;
1309     case BUILT_IN_FPUTS:
1310       if (nargs == 2)
1311         result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1312                                      gimple_call_arg (stmt, 1),
1313                                      ignore, false, val[0]);
1314       break;
1316     case BUILT_IN_FPUTS_UNLOCKED:
1317       if (nargs == 2)
1318         result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1319                                      gimple_call_arg (stmt, 1),
1320                                      ignore, true, val[0]);
1321       break;
1323     case BUILT_IN_MEMCPY_CHK:
1324     case BUILT_IN_MEMPCPY_CHK:
1325     case BUILT_IN_MEMMOVE_CHK:
1326     case BUILT_IN_MEMSET_CHK:
1327       if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1328         result = fold_builtin_memory_chk (loc, callee,
1329                                           gimple_call_arg (stmt, 0),
1330                                           gimple_call_arg (stmt, 1),
1331                                           gimple_call_arg (stmt, 2),
1332                                           gimple_call_arg (stmt, 3),
1333                                           val[2], ignore,
1334                                           DECL_FUNCTION_CODE (callee));
1335       break;
1337     case BUILT_IN_STRCPY_CHK:
1338     case BUILT_IN_STPCPY_CHK:
1339       if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1340         result = fold_builtin_stxcpy_chk (loc, callee,
1341                                           gimple_call_arg (stmt, 0),
1342                                           gimple_call_arg (stmt, 1),
1343                                           gimple_call_arg (stmt, 2),
1344                                           val[1], ignore,
1345                                           DECL_FUNCTION_CODE (callee));
1346       break;
1348     case BUILT_IN_STRNCPY_CHK:
1349       if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1350         result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
1351                                            gimple_call_arg (stmt, 1),
1352                                            gimple_call_arg (stmt, 2),
1353                                            gimple_call_arg (stmt, 3),
1354                                            val[2]);
1355       break;
1357     case BUILT_IN_SNPRINTF_CHK:
1358     case BUILT_IN_VSNPRINTF_CHK:
1359       if (val[1] && is_gimple_val (val[1]))
1360         result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1361                                                    DECL_FUNCTION_CODE (callee));
1362       break;
1364     default:
1365       gcc_unreachable ();
1368   if (result && ignore)
1369     result = fold_ignored_result (result);
1370   return result;
1373 /* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
1374    is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
1375    KNOWN_BINFO carries the binfo describing the true type of
1376    OBJ_TYPE_REF_OBJECT(REF).  If a call to the function must be accompanied
1377    with a this adjustment, the constant which should be added to this pointer
1378    is stored to *DELTA.  If REFUSE_THUNKS is true, return NULL if the function
1379    is a thunk (other than a this adjustment which is dealt with by DELTA).  */
1381 tree
1382 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
1383                                   tree *delta, bool refuse_thunks)
1385   HOST_WIDE_INT i;
1386   tree v, fndecl;
1387   struct cgraph_node *node;
1389   v = BINFO_VIRTUALS (known_binfo);
1390   /* If there is no virtual methods leave the OBJ_TYPE_REF alone.  */
1391   if (!v)
1392     return NULL_TREE;
1393   i = 0;
     /* Walk the BINFO_VIRTUALS chain until we reach slot TOKEN.  Each slot
	occupies TARGET_VTABLE_USES_DESCRIPTORS entries when the target uses
	function descriptors in vtables, otherwise one entry.  */
1394   while (i != token)
1396       i += (TARGET_VTABLE_USES_DESCRIPTORS
1397             ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
1398       v = TREE_CHAIN (v);
1401   /* If BV_VCALL_INDEX is non-NULL, give up.  */
1402   if (TREE_TYPE (v))
1403     return NULL_TREE;
1405   fndecl = TREE_VALUE (v);
1406   node = cgraph_get_node_or_alias (fndecl);
1407   if (refuse_thunks
1408       && (!node
1409   /* Bail out if it is a thunk declaration.  Since simple this_adjusting
1410      thunks are represented by a constant in TREE_PURPOSE of items in
1411      BINFO_VIRTUALS, this is a more complicate type which we cannot handle as
1412      yet.
1414      FIXME: Remove the following condition once we are able to represent
1415      thunk information on call graph edges.  */
1416           || (node->same_body_alias && node->thunk.thunk_p)))
1417     return NULL_TREE;
1419   /* When cgraph node is missing and function is not public, we cannot
1420      devirtualize.  This can happen in WHOPR when the actual method
1421      ends up in other partition, because we found devirtualization
1422      possibility too late.  */
1423   if (!can_refer_decl_in_current_unit_p (TREE_VALUE (v)))
1424     return NULL_TREE;
     /* The this-pointer adjustment lives in TREE_PURPOSE and must fit a
	host integer for the caller to apply it.  */
1426   *delta = TREE_PURPOSE (v);
1427   gcc_checking_assert (host_integerp (*delta, 0));
1428   return fndecl;
1431 /* Generate code adjusting the first parameter of a call statement determined
1432 by GSI by DELTA. */
1434 void
1435 gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
1437 gimple call_stmt = gsi_stmt (*gsi);
1438 tree parm, tmp;
1439 gimple new_stmt;
1441 delta = fold_convert (sizetype, delta);
1442 gcc_assert (gimple_call_num_args (call_stmt) >= 1);
1443 parm = gimple_call_arg (call_stmt, 0);
1444 gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
1445 tmp = create_tmp_var (TREE_TYPE (parm), NULL);
1446 add_referenced_var (tmp);
1448 tmp = make_ssa_name (tmp, NULL);
1449 new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
1450 SSA_NAME_DEF_STMT (tmp) = new_stmt;
1451 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1452 gimple_call_set_arg (call_stmt, 0, tmp);
1455 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1456 The statement may be replaced by another statement, e.g., if the call
1457 simplifies to a constant value. Return true if any changes were made.
1458 It is assumed that the operands have been previously folded. */
1460 bool
1461 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1463 gimple stmt = gsi_stmt (*gsi);
1465 tree callee = gimple_call_fndecl (stmt);
1467 /* Check for builtins that CCP can handle using information not
1468 available in the generic fold routines. */
1469 if (!inplace && callee && DECL_BUILT_IN (callee))
1471 tree result = gimple_fold_builtin (stmt);
1473 if (result)
1475 if (!update_call_from_tree (gsi, result))
1476 gimplify_and_update_call_from_tree (gsi, result);
1477 return true;
1480 return false;
1483 /* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
1484    distinguishes both cases.  */
1486 static bool
1487 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1489   bool changed = false;
1490   gimple stmt = gsi_stmt (*gsi);
1491   unsigned i;
1492   gimple_stmt_iterator gsinext = *gsi;
1493   gimple next_stmt;
     /* Remember the following statement so we can later detect whether
	folding removed STMT and the iterator now points at its successor.  */
1495   gsi_next (&gsinext);
1496   next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);
1498   /* Fold the main computation performed by the statement.  */
1499   switch (gimple_code (stmt))
1501     case GIMPLE_ASSIGN:
1503         unsigned old_num_ops = gimple_num_ops (stmt);
1504         tree new_rhs = fold_gimple_assign (gsi);
1505         tree lhs = gimple_assign_lhs (stmt);
1506         if (new_rhs
1507             && !useless_type_conversion_p (TREE_TYPE (lhs),
1508                                            TREE_TYPE (new_rhs)))
1509           new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
     /* In-place folding may not grow the statement, hence the operand
	count check when INPLACE.  */
1510         if (new_rhs
1511             && (!inplace
1512                 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1514             gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1515             changed = true;
1517         break;
1520     case GIMPLE_COND:
1521       changed |= fold_gimple_cond (stmt);
1522       break;
1524     case GIMPLE_CALL:
1525       /* Fold *& in call arguments.  */
1526       for (i = 0; i < gimple_call_num_args (stmt); ++i)
1527         if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1529             tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1530             if (tmp)
1532                 gimple_call_set_arg (stmt, i, tmp);
1533                 changed = true;
1536       changed |= gimple_fold_call (gsi, inplace);
1537       break;
1539     case GIMPLE_ASM:
1540       /* Fold *& in asm operands.  */
1542         size_t noutputs;
1543         const char **oconstraints;
1544         const char *constraint;
1545         bool allows_mem, allows_reg;
1547         noutputs = gimple_asm_noutputs (stmt);
1548         oconstraints = XALLOCAVEC (const char *, noutputs);
1550         for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1552             tree link = gimple_asm_output_op (stmt, i);
1553             tree op = TREE_VALUE (link);
1554             oconstraints[i]
1555               = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1556             if (REFERENCE_CLASS_P (op)
1557                 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1559                 TREE_VALUE (link) = op;
1560                 changed = true;
     /* Input operands may only be folded to a memory form when their
	constraint permits memory but not a register.  */
1563         for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1565             tree link = gimple_asm_input_op (stmt, i);
1566             tree op = TREE_VALUE (link);
1567             constraint
1568               = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1569             parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1570                                     oconstraints, &allows_mem, &allows_reg);
1571             if (REFERENCE_CLASS_P (op)
1572                 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1573                    != NULL_TREE)
1575                 TREE_VALUE (link) = op;
1576                 changed = true;
1580       break;
1582     case GIMPLE_DEBUG:
1583       if (gimple_debug_bind_p (stmt))
1585           tree val = gimple_debug_bind_get_value (stmt);
1586           if (val
1587               && REFERENCE_CLASS_P (val))
1589               tree tem = maybe_fold_reference (val, false);
1590               if (tem)
1592                   gimple_debug_bind_set_value (stmt, tem);
1593                   changed = true;
1597       break;
1599     default:;
1602   /* If stmt folds into nothing and it was the last stmt in a bb,
1603      don't call gsi_stmt.  */
1604   if (gsi_end_p (*gsi))
1606       gcc_assert (next_stmt == NULL);
1607       return changed;
1610   stmt = gsi_stmt (*gsi);
1612   /* Fold *& on the lhs.  Don't do this if stmt folded into nothing,
1613      as we'd changing the next stmt.  */
1614   if (gimple_has_lhs (stmt) && stmt != next_stmt)
1616       tree lhs = gimple_get_lhs (stmt);
1617       if (lhs && REFERENCE_CLASS_P (lhs))
1619           tree new_lhs = maybe_fold_reference (lhs, true);
1620           if (new_lhs)
1622               gimple_set_lhs (stmt, new_lhs);
1623               changed = true;
1628   return changed;
1631 /* Fold the statement pointed to by GSI. In some cases, this function may
1632 replace the whole statement with a new one. Returns true iff folding
1633 makes any changes.
1634 The statement pointed to by GSI should be in valid gimple form but may
1635 be in unfolded state as resulting from for example constant propagation
1636 which can produce *&x = 0. */
1638 bool
1639 fold_stmt (gimple_stmt_iterator *gsi)
1641 return fold_stmt_1 (gsi, false);
1644 /* Perform the minimal folding on statement STMT. Only operations like
1645 *&x created by constant propagation are handled. The statement cannot
1646 be replaced with a new one. Return true if the statement was
1647 changed, false otherwise.
1648 The statement STMT should be in valid gimple form but may
1649 be in unfolded state as resulting from for example constant propagation
1650 which can produce *&x = 0. */
1652 bool
1653 fold_stmt_inplace (gimple stmt)
1655 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1656 bool changed = fold_stmt_1 (&gsi, true);
1657 gcc_assert (gsi_stmt (gsi) == stmt);
1658 return changed;
1661 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1662 if EXPR is null or we don't know how.
1663 If non-null, the result always has boolean type. */
1665 static tree
1666 canonicalize_bool (tree expr, bool invert)
1668 if (!expr)
1669 return NULL_TREE;
1670 else if (invert)
1672 if (integer_nonzerop (expr))
1673 return boolean_false_node;
1674 else if (integer_zerop (expr))
1675 return boolean_true_node;
1676 else if (TREE_CODE (expr) == SSA_NAME)
1677 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1678 build_int_cst (TREE_TYPE (expr), 0));
1679 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1680 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1681 boolean_type_node,
1682 TREE_OPERAND (expr, 0),
1683 TREE_OPERAND (expr, 1));
1684 else
1685 return NULL_TREE;
1687 else
1689 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1690 return expr;
1691 if (integer_nonzerop (expr))
1692 return boolean_true_node;
1693 else if (integer_zerop (expr))
1694 return boolean_false_node;
1695 else if (TREE_CODE (expr) == SSA_NAME)
1696 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1697 build_int_cst (TREE_TYPE (expr), 0));
1698 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1699 return fold_build2 (TREE_CODE (expr),
1700 boolean_type_node,
1701 TREE_OPERAND (expr, 0),
1702 TREE_OPERAND (expr, 1));
1703 else
1704 return NULL_TREE;
1708 /* Check to see if a boolean expression EXPR is logically equivalent to the
1709    comparison (OP1 CODE OP2).  Check for various identities involving
1710    SSA_NAMEs.  */
1712 static bool
1713 same_bool_comparison_p (const_tree expr, enum tree_code code,
1714                         const_tree op1, const_tree op2)
1716   gimple s;
1718   /* The obvious case.  */
1719   if (TREE_CODE (expr) == code
1720       && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1721       && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1722     return true;
1724   /* Check for comparing (name, name != 0) and the case where expr
1725      is an SSA_NAME with a definition matching the comparison.  */
1726   if (TREE_CODE (expr) == SSA_NAME
1727       && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1729       if (operand_equal_p (expr, op1, 0))
1730         return ((code == NE_EXPR && integer_zerop (op2))
1731                 || (code == EQ_EXPR && integer_nonzerop (op2)));
     /* Or the SSA name's defining assignment is exactly the comparison.  */
1732       s = SSA_NAME_DEF_STMT (expr);
1733       if (is_gimple_assign (s)
1734           && gimple_assign_rhs_code (s) == code
1735           && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1736           && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1737         return true;
1740   /* If op1 is of the form (name != 0) or (name == 0), and the definition
1741      of name is a comparison, recurse.  */
1742   if (TREE_CODE (op1) == SSA_NAME
1743       && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1745       s = SSA_NAME_DEF_STMT (op1);
1746       if (is_gimple_assign (s)
1747           && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1749           enum tree_code c = gimple_assign_rhs_code (s);
     /* (name != 0) / (name == 1) keeps the comparison's sense;
	(name == 0) / (name != 1) inverts it before recursing.  */
1750           if ((c == NE_EXPR && integer_zerop (op2))
1751               || (c == EQ_EXPR && integer_nonzerop (op2)))
1752             return same_bool_comparison_p (expr, c,
1753                                            gimple_assign_rhs1 (s),
1754                                            gimple_assign_rhs2 (s));
1755           if ((c == EQ_EXPR && integer_zerop (op2))
1756               || (c == NE_EXPR && integer_nonzerop (op2)))
1757             return same_bool_comparison_p (expr,
1758                                            invert_tree_comparison (c, false),
1759                                            gimple_assign_rhs1 (s),
1760                                            gimple_assign_rhs2 (s));
1763   return false;
1766 /* Check to see if two boolean expressions OP1 and OP2 are logically
1767 equivalent. */
1769 static bool
1770 same_bool_result_p (const_tree op1, const_tree op2)
1772 /* Simple cases first. */
1773 if (operand_equal_p (op1, op2, 0))
1774 return true;
1776 /* Check the cases where at least one of the operands is a comparison.
1777 These are a bit smarter than operand_equal_p in that they apply some
1778 identifies on SSA_NAMEs. */
1779 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1780 && same_bool_comparison_p (op1, TREE_CODE (op2),
1781 TREE_OPERAND (op2, 0),
1782 TREE_OPERAND (op2, 1)))
1783 return true;
1784 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1785 && same_bool_comparison_p (op2, TREE_CODE (op1),
1786 TREE_OPERAND (op1, 0),
1787 TREE_OPERAND (op1, 1)))
1788 return true;
1790 /* Default case. */
1791 return false;
1794 /* Forward declarations for some mutually recursive functions.  */
     /* The and_* and or_* trios are duals: each *_var_with_comparison
	dispatches (applying DeMorgan on inversion) to a *_var_with_comparison_1
	worker, which in turn recurses through *_comparisons_1.  */
1796 static tree
1797 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1798                    enum tree_code code2, tree op2a, tree op2b);
1799 static tree
1800 and_var_with_comparison (tree var, bool invert,
1801                          enum tree_code code2, tree op2a, tree op2b);
1802 static tree
1803 and_var_with_comparison_1 (gimple stmt,
1804                            enum tree_code code2, tree op2a, tree op2b);
1805 static tree
1806 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1807                   enum tree_code code2, tree op2a, tree op2b);
1808 static tree
1809 or_var_with_comparison (tree var, bool invert,
1810                         enum tree_code code2, tree op2a, tree op2b);
1811 static tree
1812 or_var_with_comparison_1 (gimple stmt,
1813                           enum tree_code code2, tree op2a, tree op2b);
1815 /* Helper function for and_comparisons_1: try to simplify the AND of the
1816 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1817 If INVERT is true, invert the value of the VAR before doing the AND.
1818 Return NULL_EXPR if we can't simplify this to a single expression. */
1820 static tree
1821 and_var_with_comparison (tree var, bool invert,
1822 enum tree_code code2, tree op2a, tree op2b)
1824 tree t;
1825 gimple stmt = SSA_NAME_DEF_STMT (var);
1827 /* We can only deal with variables whose definitions are assignments. */
1828 if (!is_gimple_assign (stmt))
1829 return NULL_TREE;
1831 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1832 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1833 Then we only have to consider the simpler non-inverted cases. */
1834 if (invert)
1835 t = or_var_with_comparison_1 (stmt,
1836 invert_tree_comparison (code2, false),
1837 op2a, op2b);
1838 else
1839 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1840 return canonicalize_bool (t, invert);
1843 /* Try to simplify the AND of the ssa variable defined by the assignment
1844    STMT with the comparison specified by (OP2A CODE2 OP2B).
1845    Return NULL_EXPR if we can't simplify this to a single expression.  */
1847 static tree
1848 and_var_with_comparison_1 (gimple stmt,
1849                            enum tree_code code2, tree op2a, tree op2b)
1851   tree var = gimple_assign_lhs (stmt);
1852   tree true_test_var = NULL_TREE;
1853   tree false_test_var = NULL_TREE;
1854   enum tree_code innercode = gimple_assign_rhs_code (stmt);
1856   /* Check for identities like (var AND (var == 0)) => false.  */
     /* TRUE_TEST_VAR / FALSE_TEST_VAR record OP2A when the second operand
	is really a truth test of that name; they feed the boolean identity
	checks below.  */
1857   if (TREE_CODE (op2a) == SSA_NAME
1858       && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1860       if ((code2 == NE_EXPR && integer_zerop (op2b))
1861           || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1863           true_test_var = op2a;
1864           if (var == true_test_var)
1865             return var;
1867       else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1868                || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1870           false_test_var = op2a;
1871           if (var == false_test_var)
1872             return boolean_false_node;
1876   /* If the definition is a comparison, recurse on it.  */
1877   if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1879       tree t = and_comparisons_1 (innercode,
1880                                   gimple_assign_rhs1 (stmt),
1881                                   gimple_assign_rhs2 (stmt),
1882                                   code2,
1883                                   op2a,
1884                                   op2b);
1885       if (t)
1886         return t;
1889   /* If the definition is an AND or OR expression, we may be able to
1890      simplify by reassociating.  */
1891   if (innercode == TRUTH_AND_EXPR
1892       || innercode == TRUTH_OR_EXPR
1893       || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1894           && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
1896       tree inner1 = gimple_assign_rhs1 (stmt);
1897       tree inner2 = gimple_assign_rhs2 (stmt);
1898       gimple s;
1899       tree t;
1900       tree partial = NULL_TREE;
1901       bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);
1903       /* Check for boolean identities that don't require recursive examination
1904          of inner1/inner2:
1905          inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1906          inner1 AND (inner1 OR inner2) => inner1
1907          !inner1 AND (inner1 AND inner2) => false
1908          !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1909          Likewise for similar cases involving inner2.  */
1910       if (inner1 == true_test_var)
1911         return (is_and ? var : inner1);
1912       else if (inner2 == true_test_var)
1913         return (is_and ? var : inner2);
1914       else if (inner1 == false_test_var)
1915         return (is_and
1916                 ? boolean_false_node
1917                 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1918       else if (inner2 == false_test_var)
1919         return (is_and
1920                 ? boolean_false_node
1921                 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1923       /* Next, redistribute/reassociate the AND across the inner tests.
1924          Compute the first partial result, (inner1 AND (op2a code op2b)) */
1925       if (TREE_CODE (inner1) == SSA_NAME
1926           && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1927           && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1928           && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1929                                               gimple_assign_rhs1 (s),
1930                                               gimple_assign_rhs2 (s),
1931                                               code2, op2a, op2b)))
1933           /* Handle the AND case, where we are reassociating:
1934              (inner1 AND inner2) AND (op2a code2 op2b)
1935              => (t AND inner2)
1936              If the partial result t is a constant, we win.  Otherwise
1937              continue on to try reassociating with the other inner test.  */
1938           if (is_and)
1940               if (integer_onep (t))
1941                 return inner2;
1942               else if (integer_zerop (t))
1943                 return boolean_false_node;
1946           /* Handle the OR case, where we are redistributing:
1947              (inner1 OR inner2) AND (op2a code2 op2b)
1948              => (t OR (inner2 AND (op2a code2 op2b))) */
1949           else if (integer_onep (t))
1950             return boolean_true_node;
1952           /* Save partial result for later.  */
1953           partial = t;
1956       /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1957       if (TREE_CODE (inner2) == SSA_NAME
1958           && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1959           && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1960           && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1961                                               gimple_assign_rhs1 (s),
1962                                               gimple_assign_rhs2 (s),
1963                                               code2, op2a, op2b)))
1965           /* Handle the AND case, where we are reassociating:
1966              (inner1 AND inner2) AND (op2a code2 op2b)
1967              => (inner1 AND t) */
1968           if (is_and)
1970               if (integer_onep (t))
1971                 return inner1;
1972               else if (integer_zerop (t))
1973                 return boolean_false_node;
1974               /* If both are the same, we can apply the identity
1975                  (x AND x) == x.  */
1976               else if (partial && same_bool_result_p (t, partial))
1977                 return t;
1980           /* Handle the OR case. where we are redistributing:
1981              (inner1 OR inner2) AND (op2a code2 op2b)
1982              => (t OR (inner1 AND (op2a code2 op2b)))
1983              => (t OR partial) */
1984           else
1986               if (integer_onep (t))
1987                 return boolean_true_node;
1988               else if (partial)
1990                   /* We already got a simplification for the other
1991                      operand to the redistributed OR expression.  The
1992                      interesting case is when at least one is false.
1993                      Or, if both are the same, we can apply the identity
1994                      (x OR x) == x.  */
1995                   if (integer_zerop (partial))
1996                     return t;
1997                   else if (integer_zerop (t))
1998                     return partial;
1999                   else if (same_bool_result_p (t, partial))
2000                     return t;
2005   return NULL_TREE;
2008 /* Try to simplify the AND of two comparisons defined by
2009 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2010 If this can be done without constructing an intermediate value,
2011 return the resulting tree; otherwise NULL_TREE is returned.
2012 This function is deliberately asymmetric as it recurses on SSA_DEFs
2013 in the first comparison but not the second. */
2015 static tree
2016 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2017 enum tree_code code2, tree op2a, tree op2b)
2019 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
2020 if (operand_equal_p (op1a, op2a, 0)
2021 && operand_equal_p (op1b, op2b, 0))
2023 tree t = combine_comparisons (UNKNOWN_LOCATION,
2024 TRUTH_ANDIF_EXPR, code1, code2,
2025 boolean_type_node, op1a, op1b);
2026 if (t)
2027 return t;
2030 /* Likewise the swapped case of the above. */
2031 if (operand_equal_p (op1a, op2b, 0)
2032 && operand_equal_p (op1b, op2a, 0))
2034 tree t = combine_comparisons (UNKNOWN_LOCATION,
2035 TRUTH_ANDIF_EXPR, code1,
2036 swap_tree_comparison (code2),
2037 boolean_type_node, op1a, op1b);
2038 if (t)
2039 return t;
2042 /* If both comparisons are of the same value against constants, we might
2043 be able to merge them. */
2044 if (operand_equal_p (op1a, op2a, 0)
2045 && TREE_CODE (op1b) == INTEGER_CST
2046 && TREE_CODE (op2b) == INTEGER_CST)
2048 int cmp = tree_int_cst_compare (op1b, op2b);
2050 /* If we have (op1a == op1b), we should either be able to
2051 return that or FALSE, depending on whether the constant op1b
2052 also satisfies the other comparison against op2b. */
2053 if (code1 == EQ_EXPR)
2055 bool done = true;
2056 bool val;
2057 switch (code2)
2059 case EQ_EXPR: val = (cmp == 0); break;
2060 case NE_EXPR: val = (cmp != 0); break;
2061 case LT_EXPR: val = (cmp < 0); break;
2062 case GT_EXPR: val = (cmp > 0); break;
2063 case LE_EXPR: val = (cmp <= 0); break;
2064 case GE_EXPR: val = (cmp >= 0); break;
2065 default: done = false;
2067 if (done)
2069 if (val)
2070 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2071 else
2072 return boolean_false_node;
2075 /* Likewise if the second comparison is an == comparison. */
2076 else if (code2 == EQ_EXPR)
2078 bool done = true;
2079 bool val;
2080 switch (code1)
2082 case EQ_EXPR: val = (cmp == 0); break;
2083 case NE_EXPR: val = (cmp != 0); break;
2084 case LT_EXPR: val = (cmp > 0); break;
2085 case GT_EXPR: val = (cmp < 0); break;
2086 case LE_EXPR: val = (cmp >= 0); break;
2087 case GE_EXPR: val = (cmp <= 0); break;
2088 default: done = false;
2090 if (done)
2092 if (val)
2093 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2094 else
2095 return boolean_false_node;
2099 /* Same business with inequality tests. */
2100 else if (code1 == NE_EXPR)
2102 bool val;
2103 switch (code2)
2105 case EQ_EXPR: val = (cmp != 0); break;
2106 case NE_EXPR: val = (cmp == 0); break;
2107 case LT_EXPR: val = (cmp >= 0); break;
2108 case GT_EXPR: val = (cmp <= 0); break;
2109 case LE_EXPR: val = (cmp > 0); break;
2110 case GE_EXPR: val = (cmp < 0); break;
2111 default:
2112 val = false;
2114 if (val)
2115 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2117 else if (code2 == NE_EXPR)
2119 bool val;
2120 switch (code1)
2122 case EQ_EXPR: val = (cmp == 0); break;
2123 case NE_EXPR: val = (cmp != 0); break;
2124 case LT_EXPR: val = (cmp <= 0); break;
2125 case GT_EXPR: val = (cmp >= 0); break;
2126 case LE_EXPR: val = (cmp < 0); break;
2127 case GE_EXPR: val = (cmp > 0); break;
2128 default:
2129 val = false;
2131 if (val)
2132 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2135 /* Chose the more restrictive of two < or <= comparisons. */
2136 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2137 && (code2 == LT_EXPR || code2 == LE_EXPR))
2139 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2140 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2141 else
2142 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2145 /* Likewise chose the more restrictive of two > or >= comparisons. */
2146 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2147 && (code2 == GT_EXPR || code2 == GE_EXPR))
2149 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2150 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2151 else
2152 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2155 /* Check for singleton ranges. */
2156 else if (cmp == 0
2157 && ((code1 == LE_EXPR && code2 == GE_EXPR)
2158 || (code1 == GE_EXPR && code2 == LE_EXPR)))
2159 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
2161 /* Check for disjoint ranges. */
2162 else if (cmp <= 0
2163 && (code1 == LT_EXPR || code1 == LE_EXPR)
2164 && (code2 == GT_EXPR || code2 == GE_EXPR))
2165 return boolean_false_node;
2166 else if (cmp >= 0
2167 && (code1 == GT_EXPR || code1 == GE_EXPR)
2168 && (code2 == LT_EXPR || code2 == LE_EXPR))
2169 return boolean_false_node;
2172 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2173 NAME's definition is a truth value. See if there are any simplifications
2174 that can be done against the NAME's definition. */
2175 if (TREE_CODE (op1a) == SSA_NAME
2176 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2177 && (integer_zerop (op1b) || integer_onep (op1b)))
2179 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2180 || (code1 == NE_EXPR && integer_onep (op1b)));
2181 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2182 switch (gimple_code (stmt))
2184 case GIMPLE_ASSIGN:
2185 /* Try to simplify by copy-propagating the definition. */
2186 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
2188 case GIMPLE_PHI:
2189 /* If every argument to the PHI produces the same result when
2190 ANDed with the second comparison, we win.
2191 Do not do this unless the type is bool since we need a bool
2192 result here anyway. */
2193 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2195 tree result = NULL_TREE;
2196 unsigned i;
2197 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2199 tree arg = gimple_phi_arg_def (stmt, i);
2201 /* If this PHI has itself as an argument, ignore it.
2202 If all the other args produce the same result,
2203 we're still OK. */
2204 if (arg == gimple_phi_result (stmt))
2205 continue;
2206 else if (TREE_CODE (arg) == INTEGER_CST)
2208 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
2210 if (!result)
2211 result = boolean_false_node;
2212 else if (!integer_zerop (result))
2213 return NULL_TREE;
2215 else if (!result)
2216 result = fold_build2 (code2, boolean_type_node,
2217 op2a, op2b);
2218 else if (!same_bool_comparison_p (result,
2219 code2, op2a, op2b))
2220 return NULL_TREE;
2222 else if (TREE_CODE (arg) == SSA_NAME
2223 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2225 tree temp;
2226 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2227 /* In simple cases we can look through PHI nodes,
2228 but we have to be careful with loops.
2229 See PR49073. */
2230 if (! dom_info_available_p (CDI_DOMINATORS)
2231 || gimple_bb (def_stmt) == gimple_bb (stmt)
2232 || dominated_by_p (CDI_DOMINATORS,
2233 gimple_bb (def_stmt),
2234 gimple_bb (stmt)))
2235 return NULL_TREE;
2236 temp = and_var_with_comparison (arg, invert, code2,
2237 op2a, op2b);
2238 if (!temp)
2239 return NULL_TREE;
2240 else if (!result)
2241 result = temp;
2242 else if (!same_bool_result_p (result, temp))
2243 return NULL_TREE;
2245 else
2246 return NULL_TREE;
2248 return result;
2251 default:
2252 break;
2255 return NULL_TREE;
2258 /* Try to simplify the AND of two comparisons, specified by
2259 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2260 If this can be simplified to a single expression (without requiring
2261 introducing more SSA variables to hold intermediate values),
2262 return the resulting tree. Otherwise return NULL_TREE.
2263 If the result expression is non-null, it has boolean type. */
2265 tree
2266 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2267 enum tree_code code2, tree op2a, tree op2b)
2269 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2270 if (t)
2271 return t;
2272 else
2273 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2276 /* Helper function for or_comparisons_1: try to simplify the OR of the
2277 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2278 If INVERT is true, invert the value of VAR before doing the OR.
2279 Return NULL_EXPR if we can't simplify this to a single expression. */
2281 static tree
2282 or_var_with_comparison (tree var, bool invert,
2283 enum tree_code code2, tree op2a, tree op2b)
2285 tree t;
2286 gimple stmt = SSA_NAME_DEF_STMT (var);
2288 /* We can only deal with variables whose definitions are assignments. */
2289 if (!is_gimple_assign (stmt))
2290 return NULL_TREE;
2292 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2293 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2294 Then we only have to consider the simpler non-inverted cases. */
2295 if (invert)
2296 t = and_var_with_comparison_1 (stmt,
2297 invert_tree_comparison (code2, false),
2298 op2a, op2b);
2299 else
2300 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2301 return canonicalize_bool (t, invert);
2304 /* Try to simplify the OR of the ssa variable defined by the assignment
2305 STMT with the comparison specified by (OP2A CODE2 OP2B).
2306 Return NULL_EXPR if we can't simplify this to a single expression. */
2308 static tree
2309 or_var_with_comparison_1 (gimple stmt,
2310 enum tree_code code2, tree op2a, tree op2b)
2312 tree var = gimple_assign_lhs (stmt);
2313 tree true_test_var = NULL_TREE;
2314 tree false_test_var = NULL_TREE;
2315 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2317 /* Check for identities like (var OR (var != 0)) => true . */
2318 if (TREE_CODE (op2a) == SSA_NAME
2319 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2321 if ((code2 == NE_EXPR && integer_zerop (op2b))
2322 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2324 true_test_var = op2a;
2325 if (var == true_test_var)
2326 return var;
2328 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2329 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2331 false_test_var = op2a;
2332 if (var == false_test_var)
2333 return boolean_true_node;
2337 /* If the definition is a comparison, recurse on it. */
2338 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2340 tree t = or_comparisons_1 (innercode,
2341 gimple_assign_rhs1 (stmt),
2342 gimple_assign_rhs2 (stmt),
2343 code2,
2344 op2a,
2345 op2b);
2346 if (t)
2347 return t;
2350 /* If the definition is an AND or OR expression, we may be able to
2351 simplify by reassociating. */
2352 if (innercode == TRUTH_AND_EXPR
2353 || innercode == TRUTH_OR_EXPR
2354 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2355 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
2357 tree inner1 = gimple_assign_rhs1 (stmt);
2358 tree inner2 = gimple_assign_rhs2 (stmt);
2359 gimple s;
2360 tree t;
2361 tree partial = NULL_TREE;
2362 bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
2364 /* Check for boolean identities that don't require recursive examination
2365 of inner1/inner2:
2366 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2367 inner1 OR (inner1 AND inner2) => inner1
2368 !inner1 OR (inner1 OR inner2) => true
2369 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2371 if (inner1 == true_test_var)
2372 return (is_or ? var : inner1);
2373 else if (inner2 == true_test_var)
2374 return (is_or ? var : inner2);
2375 else if (inner1 == false_test_var)
2376 return (is_or
2377 ? boolean_true_node
2378 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2379 else if (inner2 == false_test_var)
2380 return (is_or
2381 ? boolean_true_node
2382 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2384 /* Next, redistribute/reassociate the OR across the inner tests.
2385 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2386 if (TREE_CODE (inner1) == SSA_NAME
2387 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2388 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2389 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2390 gimple_assign_rhs1 (s),
2391 gimple_assign_rhs2 (s),
2392 code2, op2a, op2b)))
2394 /* Handle the OR case, where we are reassociating:
2395 (inner1 OR inner2) OR (op2a code2 op2b)
2396 => (t OR inner2)
2397 If the partial result t is a constant, we win. Otherwise
2398 continue on to try reassociating with the other inner test. */
2399 if (is_or)
2401 if (integer_onep (t))
2402 return boolean_true_node;
2403 else if (integer_zerop (t))
2404 return inner2;
2407 /* Handle the AND case, where we are redistributing:
2408 (inner1 AND inner2) OR (op2a code2 op2b)
2409 => (t AND (inner2 OR (op2a code op2b))) */
2410 else if (integer_zerop (t))
2411 return boolean_false_node;
2413 /* Save partial result for later. */
2414 partial = t;
2417 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2418 if (TREE_CODE (inner2) == SSA_NAME
2419 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2420 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2421 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2422 gimple_assign_rhs1 (s),
2423 gimple_assign_rhs2 (s),
2424 code2, op2a, op2b)))
2426 /* Handle the OR case, where we are reassociating:
2427 (inner1 OR inner2) OR (op2a code2 op2b)
2428 => (inner1 OR t)
2429 => (t OR partial) */
2430 if (is_or)
2432 if (integer_zerop (t))
2433 return inner1;
2434 else if (integer_onep (t))
2435 return boolean_true_node;
2436 /* If both are the same, we can apply the identity
2437 (x OR x) == x. */
2438 else if (partial && same_bool_result_p (t, partial))
2439 return t;
2442 /* Handle the AND case, where we are redistributing:
2443 (inner1 AND inner2) OR (op2a code2 op2b)
2444 => (t AND (inner1 OR (op2a code2 op2b)))
2445 => (t AND partial) */
2446 else
2448 if (integer_zerop (t))
2449 return boolean_false_node;
2450 else if (partial)
2452 /* We already got a simplification for the other
2453 operand to the redistributed AND expression. The
2454 interesting case is when at least one is true.
2455 Or, if both are the same, we can apply the identity
2456 (x AND x) == x. */
2457 if (integer_onep (partial))
2458 return t;
2459 else if (integer_onep (t))
2460 return partial;
2461 else if (same_bool_result_p (t, partial))
2462 return t;
2467 return NULL_TREE;
2470 /* Try to simplify the OR of two comparisons defined by
2471 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2472 If this can be done without constructing an intermediate value,
2473 return the resulting tree; otherwise NULL_TREE is returned.
2474 This function is deliberately asymmetric as it recurses on SSA_DEFs
2475 in the first comparison but not the second. */
2477 static tree
2478 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2479 enum tree_code code2, tree op2a, tree op2b)
2481 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2482 if (operand_equal_p (op1a, op2a, 0)
2483 && operand_equal_p (op1b, op2b, 0))
2485 tree t = combine_comparisons (UNKNOWN_LOCATION,
2486 TRUTH_ORIF_EXPR, code1, code2,
2487 boolean_type_node, op1a, op1b);
2488 if (t)
2489 return t;
2492 /* Likewise the swapped case of the above. */
2493 if (operand_equal_p (op1a, op2b, 0)
2494 && operand_equal_p (op1b, op2a, 0))
2496 tree t = combine_comparisons (UNKNOWN_LOCATION,
2497 TRUTH_ORIF_EXPR, code1,
2498 swap_tree_comparison (code2),
2499 boolean_type_node, op1a, op1b);
2500 if (t)
2501 return t;
2504 /* If both comparisons are of the same value against constants, we might
2505 be able to merge them. */
2506 if (operand_equal_p (op1a, op2a, 0)
2507 && TREE_CODE (op1b) == INTEGER_CST
2508 && TREE_CODE (op2b) == INTEGER_CST)
2510 int cmp = tree_int_cst_compare (op1b, op2b);
2512 /* If we have (op1a != op1b), we should either be able to
2513 return that or TRUE, depending on whether the constant op1b
2514 also satisfies the other comparison against op2b. */
2515 if (code1 == NE_EXPR)
2517 bool done = true;
2518 bool val;
2519 switch (code2)
2521 case EQ_EXPR: val = (cmp == 0); break;
2522 case NE_EXPR: val = (cmp != 0); break;
2523 case LT_EXPR: val = (cmp < 0); break;
2524 case GT_EXPR: val = (cmp > 0); break;
2525 case LE_EXPR: val = (cmp <= 0); break;
2526 case GE_EXPR: val = (cmp >= 0); break;
2527 default: done = false;
2529 if (done)
2531 if (val)
2532 return boolean_true_node;
2533 else
2534 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2537 /* Likewise if the second comparison is a != comparison. */
2538 else if (code2 == NE_EXPR)
2540 bool done = true;
2541 bool val;
2542 switch (code1)
2544 case EQ_EXPR: val = (cmp == 0); break;
2545 case NE_EXPR: val = (cmp != 0); break;
2546 case LT_EXPR: val = (cmp > 0); break;
2547 case GT_EXPR: val = (cmp < 0); break;
2548 case LE_EXPR: val = (cmp >= 0); break;
2549 case GE_EXPR: val = (cmp <= 0); break;
2550 default: done = false;
2552 if (done)
2554 if (val)
2555 return boolean_true_node;
2556 else
2557 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2561 /* See if an equality test is redundant with the other comparison. */
2562 else if (code1 == EQ_EXPR)
2564 bool val;
2565 switch (code2)
2567 case EQ_EXPR: val = (cmp == 0); break;
2568 case NE_EXPR: val = (cmp != 0); break;
2569 case LT_EXPR: val = (cmp < 0); break;
2570 case GT_EXPR: val = (cmp > 0); break;
2571 case LE_EXPR: val = (cmp <= 0); break;
2572 case GE_EXPR: val = (cmp >= 0); break;
2573 default:
2574 val = false;
2576 if (val)
2577 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2579 else if (code2 == EQ_EXPR)
2581 bool val;
2582 switch (code1)
2584 case EQ_EXPR: val = (cmp == 0); break;
2585 case NE_EXPR: val = (cmp != 0); break;
2586 case LT_EXPR: val = (cmp > 0); break;
2587 case GT_EXPR: val = (cmp < 0); break;
2588 case LE_EXPR: val = (cmp >= 0); break;
2589 case GE_EXPR: val = (cmp <= 0); break;
2590 default:
2591 val = false;
2593 if (val)
2594 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2597 /* Chose the less restrictive of two < or <= comparisons. */
2598 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2599 && (code2 == LT_EXPR || code2 == LE_EXPR))
2601 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2602 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2603 else
2604 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2607 /* Likewise chose the less restrictive of two > or >= comparisons. */
2608 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2609 && (code2 == GT_EXPR || code2 == GE_EXPR))
2611 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2612 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2613 else
2614 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2617 /* Check for singleton ranges. */
2618 else if (cmp == 0
2619 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2620 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2621 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2623 /* Check for less/greater pairs that don't restrict the range at all. */
2624 else if (cmp >= 0
2625 && (code1 == LT_EXPR || code1 == LE_EXPR)
2626 && (code2 == GT_EXPR || code2 == GE_EXPR))
2627 return boolean_true_node;
2628 else if (cmp <= 0
2629 && (code1 == GT_EXPR || code1 == GE_EXPR)
2630 && (code2 == LT_EXPR || code2 == LE_EXPR))
2631 return boolean_true_node;
2634 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2635 NAME's definition is a truth value. See if there are any simplifications
2636 that can be done against the NAME's definition. */
2637 if (TREE_CODE (op1a) == SSA_NAME
2638 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2639 && (integer_zerop (op1b) || integer_onep (op1b)))
2641 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2642 || (code1 == NE_EXPR && integer_onep (op1b)));
2643 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2644 switch (gimple_code (stmt))
2646 case GIMPLE_ASSIGN:
2647 /* Try to simplify by copy-propagating the definition. */
2648 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2650 case GIMPLE_PHI:
2651 /* If every argument to the PHI produces the same result when
2652 ORed with the second comparison, we win.
2653 Do not do this unless the type is bool since we need a bool
2654 result here anyway. */
2655 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2657 tree result = NULL_TREE;
2658 unsigned i;
2659 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2661 tree arg = gimple_phi_arg_def (stmt, i);
2663 /* If this PHI has itself as an argument, ignore it.
2664 If all the other args produce the same result,
2665 we're still OK. */
2666 if (arg == gimple_phi_result (stmt))
2667 continue;
2668 else if (TREE_CODE (arg) == INTEGER_CST)
2670 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2672 if (!result)
2673 result = boolean_true_node;
2674 else if (!integer_onep (result))
2675 return NULL_TREE;
2677 else if (!result)
2678 result = fold_build2 (code2, boolean_type_node,
2679 op2a, op2b);
2680 else if (!same_bool_comparison_p (result,
2681 code2, op2a, op2b))
2682 return NULL_TREE;
2684 else if (TREE_CODE (arg) == SSA_NAME
2685 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2687 tree temp;
2688 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2689 /* In simple cases we can look through PHI nodes,
2690 but we have to be careful with loops.
2691 See PR49073. */
2692 if (! dom_info_available_p (CDI_DOMINATORS)
2693 || gimple_bb (def_stmt) == gimple_bb (stmt)
2694 || dominated_by_p (CDI_DOMINATORS,
2695 gimple_bb (def_stmt),
2696 gimple_bb (stmt)))
2697 return NULL_TREE;
2698 temp = or_var_with_comparison (arg, invert, code2,
2699 op2a, op2b);
2700 if (!temp)
2701 return NULL_TREE;
2702 else if (!result)
2703 result = temp;
2704 else if (!same_bool_result_p (result, temp))
2705 return NULL_TREE;
2707 else
2708 return NULL_TREE;
2710 return result;
2713 default:
2714 break;
2717 return NULL_TREE;
2720 /* Try to simplify the OR of two comparisons, specified by
2721 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2722 If this can be simplified to a single expression (without requiring
2723 introducing more SSA variables to hold intermediate values),
2724 return the resulting tree. Otherwise return NULL_TREE.
2725 If the result expression is non-null, it has boolean type. */
2727 tree
2728 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2729 enum tree_code code2, tree op2a, tree op2b)
2731 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2732 if (t)
2733 return t;
2734 else
2735 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);