/* Statement simplification on GIMPLE.
   Copyright (C) 2010, 2011 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "target.h"
#include "gimple-fold.h"
/* Return true when DECL can be referenced from current unit.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation units.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred to by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */
static bool
can_refer_decl_in_current_unit_p (tree decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;

  if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
    return true;
  /* External flag is set, so we deal with C++ reference
     to static object from other file.  */
  if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
      && TREE_CODE (decl) == VAR_DECL)
    {
      /* Just be sure it is not big in frontend setting
	 flags incorrectly.  Those variables should never
	 be finalized.  */
      gcc_checking_assert (!(vnode = varpool_get_node (decl))
			   || !vnode->finalized);
      return false;
    }
  /* When a function is public, we can always introduce a new reference.
     The exceptions are COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also when still gimplifying all referred comdat functions will be
     produced.
     ??? As observed in PR20991 for already optimized out comdat virtual
     functions we may not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;
  /* If we already output the function body, we are safe.  */
  if (TREE_ASM_WRITTEN (decl))
    return true;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have the function body and that we didn't take
	 the decision to eliminate the offline copy of the function yet.
	 The second is important when devirtualization happens during final
	 compilation stage when making a new reference no longer makes callee
	 to be compiled.  */
      if (!node || !node->analyzed || node->global.inlined_to)
	return false;
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->finalized)
	return false;
    }
  return true;
}
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.  */
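/* An illustrative example (assumed, not from the original source): a
   global initializer such as

     static int a[4];
     int *p = &a[0] + 1;

   may reach us as an ADDR_EXPR wrapped in a POINTER_PLUS_EXPR with a
   byte offset of 4 (assuming 4-byte ints); folding it back to the plain
   invariant &a[1] lets is_gimple_min_invariant accept it.  */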
tree
canonicalize_constructor_val (tree cval)
{
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR)
    {
      tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval),
					     TREE_OPERAND (cval, 0),
					     TREE_OPERAND (cval, 1),
					     TREE_TYPE (cval));
      if (t)
	cval = t;
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (cval, 0));

      if (base
	  && (TREE_CODE (base) == VAR_DECL
	      || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base))
	return NULL_TREE;
      if (cfun && base && TREE_CODE (base) == VAR_DECL)
	add_referenced_var (base);
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
    }
  return cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */
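/* Illustrative uses (assumed examples, not part of the original comment):

     static const int answer = 42;   // the INTEGER_CST 42 is returned

   and, for a 'const' variable of integral or float type without an
   initializer whose value cannot be overridden at link or run time, a
   zero constant of the variable's type is returned via build_zero_cst.  */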
tree
get_symbol_constant_value (tree sym)
{
  if (const_value_known_p (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  val = canonicalize_constructor_val (val);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */
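/* For example (illustrative): after p = &x, replacing *p by x is only
   safe when the type of x is usable in place of the type of *p (a
   useless conversion); otherwise the address/dereference pair must be
   kept so that the conversion is not lost.  */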
bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (TREE_CODE (deref) == MEM_REF
	      && TREE_CODE (addr) == ADDR_EXPR);

  /* Don't propagate if ADDR's operand has incomplete type.  */
  if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
    return false;

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
	  || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
				      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
				     TREE_TYPE (addr))
	  && useless_type_conversion_p (TREE_TYPE (deref),
					TREE_TYPE (TREE_OPERAND (addr, 0))));
}
/* A subroutine of fold_stmt.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.

   LOC is the location of the original expression.  */
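/* A sketch of the intended transformation (illustrative values):
   for 'int a[10]' with 4-byte ints, *(&a + 8) becomes a[2], because
   the byte displacement 8 divides exactly by the element size 4.
   If the division is not exact we give up below.  */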
static tree
maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of the element type) from that ARRAY_REF.
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = ssizetype;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (!elt_size)
    return NULL;
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
	 to sign-extend the (possibly pointer) offset here
	 and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
				 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
				   soffset.low, soffset.high,
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
	min_idx = TYPE_MIN_VALUE (idx_type);
      else
	min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds. For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.
     This is only an issue for multi-dimensional arrays.  */
  if (TREE_CODE (elt_type) == ARRAY_TYPE
      && domain_type)
    {
      if (TYPE_MAX_VALUE (domain_type)
	  && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
	  && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
	return NULL_TREE;
      else if (TYPE_MIN_VALUE (domain_type)
	       && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
	       && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
	return NULL_TREE;
      else if (compare_tree_int (idx, 0) < 0)
	return NULL_TREE;
    }

  {
    tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
    SET_EXPR_LOCATION (t, loc);
    return t;
  }
}
/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
   LOC is the location of original expression.

   Before attempting the conversion strip off existing ADDR_EXPRs.  */
tree
maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
				tree orig_type)
{
  tree ret;

  STRIP_NOPS (base);
  if (TREE_CODE (base) != ADDR_EXPR)
    return NULL_TREE;

  base = TREE_OPERAND (base, 0);
  if (types_compatible_p (orig_type, TREE_TYPE (base))
      && integer_zerop (offset))
    return base;

  ret = maybe_fold_offset_to_array_ref (loc, base, offset);
  if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
    return ret;
  return NULL_TREE;
}
/* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
   LOC is the location of the original expression.  */
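/* Illustrative case (assumed): for 'char b[8]', folding (char *)&b + 2
   yields &b[2]; the result is only used if the conversion back to
   ORIG_TYPE is useless, see the check below.  */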
tree
maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
			      tree orig_type)
{
  tree base, ret;

  STRIP_NOPS (addr);
  if (TREE_CODE (addr) != ADDR_EXPR)
    return NULL_TREE;
  base = TREE_OPERAND (addr, 0);
  ret = maybe_fold_offset_to_array_ref (loc, base, offset);
  if (ret)
    {
      ret = build_fold_addr_expr (ret);
      if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
	return NULL_TREE;
      SET_EXPR_LOCATION (ret, loc);
    }

  return ret;
}
/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */
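/* Concrete instance (illustrative): with 'int array[4]' the gimple
	p_1 = &array + 8;
   on a target with 4-byte ints is rewritten to
	p_1 = &array[2];
   which later folding can propagate into dereferences of p_1.  */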
tree
maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    {
      /* Or op0 should now be A[0] and the non-constant offset defined
	 via a multiplication by the array element size.  */
      if (TREE_CODE (op0) == ARRAY_REF
	  /* As we will end up creating a variable index array access
	     in the outermost array dimension make sure there isn't
	     a more inner array that the index could overflow to.  */
	  && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
	  && integer_zerop (TREE_OPERAND (op0, 1))
	  && TREE_CODE (op1) == SSA_NAME)
	{
	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
	  tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
	  if (!host_integerp (elsz, 1)
	      || !is_gimple_assign (offset_def))
	    return NULL_TREE;

	  /* Do not build array references of something that we can't
	     see the true number of array dimensions for.  */
	  if (!DECL_P (TREE_OPERAND (op0, 0))
	      && !handled_component_p (TREE_OPERAND (op0, 0)))
	    return NULL_TREE;

	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (op0),
				   TREE_OPERAND (op0, 0),
				   gimple_assign_rhs1 (offset_def),
				   TREE_OPERAND (op0, 2),
				   TREE_OPERAND (op0, 3)));
	  else if (integer_onep (elsz)
		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (op0),
				   TREE_OPERAND (op0, 0),
				   op1,
				   TREE_OPERAND (op0, 2),
				   TREE_OPERAND (op0, 3)));
	}
      else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
	       /* Ditto.  */
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
	       && TREE_CODE (op1) == SSA_NAME)
	{
	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
	  tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
	  if (!host_integerp (elsz, 1)
	      || !is_gimple_assign (offset_def))
	    return NULL_TREE;

	  /* Do not build array references of something that we can't
	     see the true number of array dimensions for.  */
	  if (!DECL_P (op0)
	      && !handled_component_p (op0))
	    return NULL_TREE;

	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
				   op0, gimple_assign_rhs1 (offset_def),
				   integer_zero_node, NULL_TREE));
	  else if (integer_onep (elsz)
		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
				   op0, op1,
				   integer_zero_node, NULL_TREE));
	}

      return NULL_TREE;
    }

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
			     array_idx, op1);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (loc, op0, op1);
  if (t)
    {
      t = build_fold_addr_expr (t);
      if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
	return NULL_TREE;
      SET_EXPR_LOCATION (t, loc);
    }

  return t;
}
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */
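/* Typical simplifications performed here (illustrative examples):
     MEM[(int *)&x, 0]  -> x    (fold back to a reference tree)
     a reference into a constant aggregate -> its constant value
				(via fold_const_aggregate_ref)
   both after first canonicalizing a MEM_REF address operand that
   became invariant by propagation.  */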
static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */
static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Try to fold a conditional expression.  */
	if (TREE_CODE (rhs) == COND_EXPR)
	  {
	    tree op0 = COND_EXPR_COND (rhs);
	    tree tem;
	    bool set = false;
	    location_t cond_loc = EXPR_LOCATION (rhs);

	    if (COMPARISON_CLASS_P (op0))
	      {
		fold_defer_overflow_warnings ();
		tem = fold_binary_loc (cond_loc,
				       TREE_CODE (op0), TREE_TYPE (op0),
				       TREE_OPERAND (op0, 0),
				       TREE_OPERAND (op0, 1));
		/* This is actually a conditional expression, not a GIMPLE
		   conditional statement, however, the valid_gimple_rhs_p
		   test still applies.  */
		set = (tem && is_gimple_condexpr (tem)
		       && valid_gimple_rhs_p (tem));
		fold_undefer_overflow_warnings (set, stmt, 0);
	      }
	    else if (is_gimple_min_invariant (op0))
	      {
		tem = op0;
		set = true;
	      }
	    else
	      return NULL_TREE;

	    if (set)
	      result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs),
					tem, COND_EXPR_THEN (rhs),
					COND_EXPR_ELSE (rhs));
	  }

	else if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return unshare_expr (get_symbol_constant_value (rhs));

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
	else if (CONVERT_EXPR_CODE_P (subcode)
		 && POINTER_TYPE_P (gimple_expr_type (stmt))
		 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
	  {
	    tree type = gimple_expr_type (stmt);
	    tree t = maybe_fold_offset_to_address (loc,
						   gimple_assign_rhs1 (stmt),
						   integer_zero_node, type);
	    if (t)
	      return t;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	{
	  tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
	    {
	      type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
	      if (!useless_type_conversion_p
		    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
		type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	    }
	  result = maybe_fold_stmt_addition (gimple_location (stmt),
					     type,
					     gimple_assign_rhs1 (stmt),
					     gimple_assign_rhs2 (stmt));
	}
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      else if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	       || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type.
	     Check that the second operand is an integer constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X
		 X == 1 and X != 0 is X  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));
	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */
static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */
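/* Illustrative scenario (assumed example): when a call like
	memcpy (&a, &b, 4);
   has been folded to the tree 'a = b', the assignment is gimplified and
   inserted here, and the resulting store inherits the call's VDEF so
   the virtual operand chain stays intact.  */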
void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE;  /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc();
  struct gimplify_ctx gctx;
  gimple last = NULL;
  gimple laststore = NULL;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);
  reaching_vuse = gimple_vuse (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_remove (si_p, true);
	  return;
	}
    }
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      if (last)
	{
	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
	  gsi_next (si_p);
	}
      new_stmt = gsi_stmt (i);
      if (gimple_in_ssa_p (cfun))
	{
	  find_new_referenced_vars (new_stmt);
	  mark_symbols_for_renaming (new_stmt);
	}
      /* If the new statement has a VUSE, update it with the exact SSA name
	 we know will reach this one.  */
      if (gimple_vuse (new_stmt))
	{
	  /* If we've also seen a previous store create a new VDEF for
	     the latter one, and make that the new reaching VUSE.  */
	  if (laststore)
	    {
	      reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
	      gimple_set_vdef (laststore, reaching_vuse);
	      update_stmt (laststore);
	      laststore = NULL;
	    }
	  gimple_set_vuse (new_stmt, reaching_vuse);
	  gimple_set_modified (new_stmt, true);
	}
      if (gimple_assign_single_p (new_stmt)
	  && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	{
	  laststore = new_stmt;
	}
      last = new_stmt;
    }

  if (lhs == NULL_TREE)
    {
      /* If we replace a call without LHS that has a VDEF and our new
	 sequence ends with a store we must make that store have the same
	 vdef in order not to break the sequencing.  This can happen
	 for instance when folding memcpy calls into assignments.  */
      if (gimple_vdef (stmt) && laststore)
	{
	  gimple_set_vdef (laststore, gimple_vdef (stmt));
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
	  update_stmt (laststore);
	}
      else if (gimple_in_ssa_p (cfun))
	{
	  unlink_stmt_vdef (stmt);
	  release_defs (stmt);
	}
      new_stmt = last;
    }
  else
    {
      if (last)
	{
	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
	  gsi_next (si_p);
	}
      if (laststore && is_gimple_reg (lhs))
	{
	  gimple_set_vdef (laststore, gimple_vdef (stmt));
	  update_stmt (laststore);
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
	  laststore = NULL;
	}
      else if (laststore)
	{
	  reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
	  gimple_set_vdef (laststore, reaching_vuse);
	  update_stmt (laststore);
	  laststore = NULL;
	}
      new_stmt = gimple_build_assign (lhs, tmp);
      if (!is_gimple_reg (tmp))
	gimple_set_vuse (new_stmt, reaching_vuse);
      if (!is_gimple_reg (lhs))
	{
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
	}
      else if (reaching_vuse == gimple_vuse (stmt))
	unlink_stmt_vdef (stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
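/* Illustrative behavior (assumed examples): with TYPE == 0 and
   ARG = "foo", LENGTH becomes 3; with TYPE == 1 and ARG a PHI of
   "ab" and "abcd", LENGTH becomes the maximum, 4; with TYPE == 2,
   ARG must be a non-negative INTEGER_CST, which is the value itself.  */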
static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
	return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
	       && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited,
				     type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	       && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_maxval_strlen (rhs, length, visited, type);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }
	}
	return true;

      default:
	return false;
    }
}
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */
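/* Example of the kind of rewrite done here (illustrative):
	strcpy (d, "hello");
   where the source length 5 is known becomes
	memcpy (d, "hello", 6);
   via fold_builtin_strcpy below.  */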
tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	      fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  If a call to the function must be accompanied
   with a this adjustment, the constant which should be added to this pointer
   is stored to *DELTA.  If REFUSE_THUNKS is true, return NULL if the function
   is a thunk (other than a this adjustment which is dealt with by DELTA).  */
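/* Illustrative walk (assumed): with TOKEN == 2 on a target that does
   not use vtable descriptors, we step twice along the TREE_CHAIN of
   BINFO_VIRTUALS and return the TREE_VALUE (the FUNCTION_DECL) of the
   entry we land on, with its this-adjustment stored in *DELTA.  */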
tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  tree *delta)
{
  HOST_WIDE_INT i;
  tree v, fndecl;

  v = BINFO_VIRTUALS (known_binfo);
  /* If there are no virtual methods leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;
  i = 0;
  while (i != token)
    {
      i += (TARGET_VTABLE_USES_DESCRIPTORS
	    ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
      v = TREE_CHAIN (v);
    }

  /* If BV_VCALL_INDEX is non-NULL, give up.  */
  if (TREE_TYPE (v))
    return NULL_TREE;

  fndecl = TREE_VALUE (v);

  /* When the cgraph node is missing and the function is not public, we cannot
     devirtualize.  This can happen in WHOPR when the actual method
     ends up in other partition, because we found devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (TREE_VALUE (v)))
    return NULL_TREE;

  *delta = TREE_PURPOSE (v);
  gcc_checking_assert (host_integerp (*delta, 0));
  return fndecl;
}
/* Generate code adjusting the first parameter of a call statement determined
   by GSI by DELTA.  */
void
gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
{
  gimple call_stmt = gsi_stmt (*gsi);
  tree parm, tmp;
  gimple new_stmt;

  delta = convert_to_ptrofftype (delta);
  gcc_assert (gimple_call_num_args (call_stmt) >= 1);
  parm = gimple_call_arg (call_stmt, 0);
  gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
  tmp = create_tmp_var (TREE_TYPE (parm), NULL);
  add_referenced_var (tmp);

  tmp = make_ssa_name (tmp, NULL);
  new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
  SSA_NAME_DEF_STMT (tmp) = new_stmt;
  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
  gimple_call_set_arg (call_stmt, 0, tmp);
}
/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors).  */
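/* Illustrative case (assumed source):

     static struct B b;	// B a class with virtual methods and no bases
     ... = &b;

   Given the ADDR_EXPR &b, the binfo of B is returned, and callers can
   turn an OBJ_TYPE_REF call through &b into a direct call.  */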
tree
gimple_extract_devirt_binfo_from_cst (tree cst)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, expected_type, binfo;
  bool last_artificial = false;

  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  expected_type = TREE_TYPE (cst);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it
     is an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
	break;
      if (offset < 0)
	return NULL_TREE;

      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL)
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_low_cst (DECL_SIZE (fld), 1);
	  if (pos <= offset && (pos + size) > offset)
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      offset -= pos;
    }
  /* Artificial sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;
  else
    return binfo;
}
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */
bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  callee = gimple_call_fndecl (stmt);
  if (!inplace && callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);

      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  return true;
	}
    }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      tree binfo, fndecl, delta, obj;
      HOST_WIDE_INT token;

      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  return true;
	}

      obj = OBJ_TYPE_REF_OBJECT (callee);
      binfo = gimple_extract_devirt_binfo_from_cst (obj);
      if (!binfo)
	return false;
      token = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
      fndecl = gimple_get_virt_method_for_binfo (token, binfo, &delta);
      if (!fndecl)
	return false;
      gcc_assert (integer_zerop (delta));
      gimple_call_set_fndecl (stmt, fndecl);
      return true;
    }

  return false;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;
  gimple_stmt_iterator gsinext = *gsi;
  gimple next_stmt;

  gsi_next (&gsinext);
  next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      /* Fold *& in call arguments.  */
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
	  {
	    tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	    if (tmp)
	      {
		gimple_call_set_arg (stmt, i, tmp);
		changed = true;
	      }
	  }
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	{
	  tree link = gimple_asm_output_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, true)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	{
	  tree link = gimple_asm_input_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, false)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  /* If stmt folds into nothing and it was the last stmt in a bb,
     don't call gsi_stmt.  */
  if (gsi_end_p (*gsi))
    {
      gcc_assert (next_stmt == NULL);
      return changed;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  Don't do this if stmt folded into nothing,
     as we'd be changing the next stmt.  */
  if (gimple_has_lhs (stmt) && stmt != next_stmt)
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */
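/* Canonical example (from the comment above): after propagation of
   p_1 = &x into *p_1 = 0, the statement reads *&x = 0, which this
   function folds to the direct store x = 0.  */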
bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}
/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement STMT should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  bool changed = fold_stmt_1 (&gsi, true);
  gcc_assert (gsi_stmt (gsi) == stmt);
  return changed;
}
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */
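/* Illustrative results (assumed examples): for a non-boolean SSA name X,
     canonicalize_bool (X, false)	=> X != 0  of boolean type
     canonicalize_bool (X, true)	=> X == 0  of boolean type
   and an inverted comparison such as (a < b, invert) becomes a >= b via
   invert_tree_comparison (not honoring NaNs here).  */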
static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  else if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */
static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
1905 /* Helper function for and_comparisons_1: try to simplify the AND of the
1906 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1907 If INVERT is true, invert the value of VAR before doing the AND.
1908 Return NULL_TREE if we can't simplify this to a single expression. */
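/* Illustrative example (not from the original source): with INVERT set
   and (op2a code2 op2b) == (x < y), the rewrite below calls
   or_var_with_comparison_1 (stmt, GE_EXPR, x, y), i.e.
   !var AND (x < y) => !(var OR (x >= y)), and canonicalize_bool
   undoes the inversion on the simplified result.  */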
1910 static tree
1911 and_var_with_comparison (tree var, bool invert,
1912 enum tree_code code2, tree op2a, tree op2b)
1914 tree t;
1915 gimple stmt = SSA_NAME_DEF_STMT (var);
1917 /* We can only deal with variables whose definitions are assignments. */
1918 if (!is_gimple_assign (stmt))
1919 return NULL_TREE;
1921 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1922 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1923 Then we only have to consider the simpler non-inverted cases. */
1924 if (invert)
1925 t = or_var_with_comparison_1 (stmt,
1926 invert_tree_comparison (code2, false),
1927 op2a, op2b);
1928 else
1929 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1930 return canonicalize_bool (t, invert);
1933 /* Try to simplify the AND of the ssa variable defined by the assignment
1934 STMT with the comparison specified by (OP2A CODE2 OP2B).
1935 Return NULL_TREE if we can't simplify this to a single expression. */
1937 static tree
1938 and_var_with_comparison_1 (gimple stmt,
1939 enum tree_code code2, tree op2a, tree op2b)
1941 tree var = gimple_assign_lhs (stmt);
1942 tree true_test_var = NULL_TREE;
1943 tree false_test_var = NULL_TREE;
1944 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1946 /* Check for identities like (var AND (var == 0)) => false. */
1947 if (TREE_CODE (op2a) == SSA_NAME
1948 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1950 if ((code2 == NE_EXPR && integer_zerop (op2b))
1951 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1953 true_test_var = op2a;
1954 if (var == true_test_var)
1955 return var;
1957 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1958 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1960 false_test_var = op2a;
1961 if (var == false_test_var)
1962 return boolean_false_node;
1966 /* If the definition is a comparison, recurse on it. */
1967 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1969 tree t = and_comparisons_1 (innercode,
1970 gimple_assign_rhs1 (stmt),
1971 gimple_assign_rhs2 (stmt),
1972 code2,
1973 op2a,
1974 op2b);
1975 if (t)
1976 return t;
1979 /* If the definition is an AND or OR expression, we may be able to
1980 simplify by reassociating. */
1981 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1982 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1984 tree inner1 = gimple_assign_rhs1 (stmt);
1985 tree inner2 = gimple_assign_rhs2 (stmt);
1986 gimple s;
1987 tree t;
1988 tree partial = NULL_TREE;
1989 bool is_and = (innercode == BIT_AND_EXPR);
1991 /* Check for boolean identities that don't require recursive examination
1992 of inner1/inner2:
1993 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1994 inner1 AND (inner1 OR inner2) => inner1
1995 !inner1 AND (inner1 AND inner2) => false
1996 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1997 Likewise for similar cases involving inner2. */
1998 if (inner1 == true_test_var)
1999 return (is_and ? var : inner1);
2000 else if (inner2 == true_test_var)
2001 return (is_and ? var : inner2);
2002 else if (inner1 == false_test_var)
2003 return (is_and
2004 ? boolean_false_node
2005 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
2006 else if (inner2 == false_test_var)
2007 return (is_and
2008 ? boolean_false_node
2009 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
2011 /* Next, redistribute/reassociate the AND across the inner tests.
2012 Compute the first partial result, (inner1 AND (op2a code op2b)) */
2013 if (TREE_CODE (inner1) == SSA_NAME
2014 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2015 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2016 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
2017 gimple_assign_rhs1 (s),
2018 gimple_assign_rhs2 (s),
2019 code2, op2a, op2b)))
2021 /* Handle the AND case, where we are reassociating:
2022 (inner1 AND inner2) AND (op2a code2 op2b)
2023 => (t AND inner2)
2024 If the partial result t is a constant, we win. Otherwise
2025 continue on to try reassociating with the other inner test. */
2026 if (is_and)
2028 if (integer_onep (t))
2029 return inner2;
2030 else if (integer_zerop (t))
2031 return boolean_false_node;
2034 /* Handle the OR case, where we are redistributing:
2035 (inner1 OR inner2) AND (op2a code2 op2b)
2036 => (t OR (inner2 AND (op2a code2 op2b))) */
2037 else if (integer_onep (t))
2038 return boolean_true_node;
2040 /* Save partial result for later. */
2041 partial = t;
2044 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
2045 if (TREE_CODE (inner2) == SSA_NAME
2046 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2047 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2048 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
2049 gimple_assign_rhs1 (s),
2050 gimple_assign_rhs2 (s),
2051 code2, op2a, op2b)))
2053 /* Handle the AND case, where we are reassociating:
2054 (inner1 AND inner2) AND (op2a code2 op2b)
2055 => (inner1 AND t) */
2056 if (is_and)
2058 if (integer_onep (t))
2059 return inner1;
2060 else if (integer_zerop (t))
2061 return boolean_false_node;
2062 /* If both are the same, we can apply the identity
2063 (x AND x) == x. */
2064 else if (partial && same_bool_result_p (t, partial))
2065 return t;
2068 /* Handle the OR case, where we are redistributing:
2069 (inner1 OR inner2) AND (op2a code2 op2b)
2070 => (t OR (inner1 AND (op2a code2 op2b)))
2071 => (t OR partial) */
2072 else
2074 if (integer_onep (t))
2075 return boolean_true_node;
2076 else if (partial)
2078 /* We already got a simplification for the other
2079 operand to the redistributed OR expression. The
2080 interesting case is when at least one is false.
2081 Or, if both are the same, we can apply the identity
2082 (x OR x) == x. */
2083 if (integer_zerop (partial))
2084 return t;
2085 else if (integer_zerop (t))
2086 return partial;
2087 else if (same_bool_result_p (t, partial))
2088 return t;
2093 return NULL_TREE;
2096 /* Try to simplify the AND of two comparisons defined by
2097 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2098 If this can be done without constructing an intermediate value,
2099 return the resulting tree; otherwise NULL_TREE is returned.
2100 This function is deliberately asymmetric as it recurses on SSA_DEFs
2101 in the first comparison but not the second. */
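/* Illustrative examples (not from the original source; x is an integer
   SSA name, 3 and 5 are INTEGER_CSTs):
   and_comparisons_1 (LT_EXPR, x, 3, LT_EXPR, x, 5) yields x < 3
   (the more restrictive bound), while
   and_comparisons_1 (LT_EXPR, x, 3, GT_EXPR, x, 5) yields
   boolean_false_node, since the two ranges are disjoint.  */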
2103 static tree
2104 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2105 enum tree_code code2, tree op2a, tree op2b)
2107 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
2108 if (operand_equal_p (op1a, op2a, 0)
2109 && operand_equal_p (op1b, op2b, 0))
2111 /* Result will be either NULL_TREE, or a combined comparison. */
2112 tree t = combine_comparisons (UNKNOWN_LOCATION,
2113 TRUTH_ANDIF_EXPR, code1, code2,
2114 boolean_type_node, op1a, op1b);
2115 if (t)
2116 return t;
2119 /* Likewise the swapped case of the above. */
2120 if (operand_equal_p (op1a, op2b, 0)
2121 && operand_equal_p (op1b, op2a, 0))
2123 /* Result will be either NULL_TREE, or a combined comparison. */
2124 tree t = combine_comparisons (UNKNOWN_LOCATION,
2125 TRUTH_ANDIF_EXPR, code1,
2126 swap_tree_comparison (code2),
2127 boolean_type_node, op1a, op1b);
2128 if (t)
2129 return t;
2132 /* If both comparisons are of the same value against constants, we might
2133 be able to merge them. */
2134 if (operand_equal_p (op1a, op2a, 0)
2135 && TREE_CODE (op1b) == INTEGER_CST
2136 && TREE_CODE (op2b) == INTEGER_CST)
2138 int cmp = tree_int_cst_compare (op1b, op2b);
2140 /* If we have (op1a == op1b), we should either be able to
2141 return that or FALSE, depending on whether the constant op1b
2142 also satisfies the other comparison against op2b. */
2143 if (code1 == EQ_EXPR)
2145 bool done = true;
2146 bool val;
2147 switch (code2)
2149 case EQ_EXPR: val = (cmp == 0); break;
2150 case NE_EXPR: val = (cmp != 0); break;
2151 case LT_EXPR: val = (cmp < 0); break;
2152 case GT_EXPR: val = (cmp > 0); break;
2153 case LE_EXPR: val = (cmp <= 0); break;
2154 case GE_EXPR: val = (cmp >= 0); break;
2155 default: done = false;
2157 if (done)
2159 if (val)
2160 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2161 else
2162 return boolean_false_node;
2165 /* Likewise if the second comparison is an == comparison. */
2166 else if (code2 == EQ_EXPR)
2168 bool done = true;
2169 bool val;
2170 switch (code1)
2172 case EQ_EXPR: val = (cmp == 0); break;
2173 case NE_EXPR: val = (cmp != 0); break;
2174 case LT_EXPR: val = (cmp > 0); break;
2175 case GT_EXPR: val = (cmp < 0); break;
2176 case LE_EXPR: val = (cmp >= 0); break;
2177 case GE_EXPR: val = (cmp <= 0); break;
2178 default: done = false;
2180 if (done)
2182 if (val)
2183 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2184 else
2185 return boolean_false_node;
2189 /* Same business with inequality tests. */
2190 else if (code1 == NE_EXPR)
2192 bool val;
2193 switch (code2)
2195 case EQ_EXPR: val = (cmp != 0); break;
2196 case NE_EXPR: val = (cmp == 0); break;
2197 case LT_EXPR: val = (cmp >= 0); break;
2198 case GT_EXPR: val = (cmp <= 0); break;
2199 case LE_EXPR: val = (cmp > 0); break;
2200 case GE_EXPR: val = (cmp < 0); break;
2201 default:
2202 val = false;
2204 if (val)
2205 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2207 else if (code2 == NE_EXPR)
2209 bool val;
2210 switch (code1)
2212 case EQ_EXPR: val = (cmp == 0); break;
2213 case NE_EXPR: val = (cmp != 0); break;
2214 case LT_EXPR: val = (cmp <= 0); break;
2215 case GT_EXPR: val = (cmp >= 0); break;
2216 case LE_EXPR: val = (cmp < 0); break;
2217 case GE_EXPR: val = (cmp > 0); break;
2218 default:
2219 val = false;
2221 if (val)
2222 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2225 /* Choose the more restrictive of two < or <= comparisons. */
2226 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2227 && (code2 == LT_EXPR || code2 == LE_EXPR))
2229 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2230 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2231 else
2232 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2235 /* Likewise choose the more restrictive of two > or >= comparisons. */
2236 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2237 && (code2 == GT_EXPR || code2 == GE_EXPR))
2239 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2240 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2241 else
2242 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2245 /* Check for singleton ranges. */
2246 else if (cmp == 0
2247 && ((code1 == LE_EXPR && code2 == GE_EXPR)
2248 || (code1 == GE_EXPR && code2 == LE_EXPR)))
2249 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
2251 /* Check for disjoint ranges. */
2252 else if (cmp <= 0
2253 && (code1 == LT_EXPR || code1 == LE_EXPR)
2254 && (code2 == GT_EXPR || code2 == GE_EXPR))
2255 return boolean_false_node;
2256 else if (cmp >= 0
2257 && (code1 == GT_EXPR || code1 == GE_EXPR)
2258 && (code2 == LT_EXPR || code2 == LE_EXPR))
2259 return boolean_false_node;
2262 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2263 NAME's definition is a truth value. See if there are any simplifications
2264 that can be done against the NAME's definition. */
2265 if (TREE_CODE (op1a) == SSA_NAME
2266 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2267 && (integer_zerop (op1b) || integer_onep (op1b)))
2269 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2270 || (code1 == NE_EXPR && integer_onep (op1b)));
2271 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2272 switch (gimple_code (stmt))
2274 case GIMPLE_ASSIGN:
2275 /* Try to simplify by copy-propagating the definition. */
2276 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
2278 case GIMPLE_PHI:
2279 /* If every argument to the PHI produces the same result when
2280 ANDed with the second comparison, we win.
2281 Do not do this unless the type is bool since we need a bool
2282 result here anyway. */
2283 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2285 tree result = NULL_TREE;
2286 unsigned i;
2287 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2289 tree arg = gimple_phi_arg_def (stmt, i);
2291 /* If this PHI has itself as an argument, ignore it.
2292 If all the other args produce the same result,
2293 we're still OK. */
2294 if (arg == gimple_phi_result (stmt))
2295 continue;
2296 else if (TREE_CODE (arg) == INTEGER_CST)
2298 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
2300 if (!result)
2301 result = boolean_false_node;
2302 else if (!integer_zerop (result))
2303 return NULL_TREE;
2305 else if (!result)
2306 result = fold_build2 (code2, boolean_type_node,
2307 op2a, op2b);
2308 else if (!same_bool_comparison_p (result,
2309 code2, op2a, op2b))
2310 return NULL_TREE;
2312 else if (TREE_CODE (arg) == SSA_NAME
2313 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2315 tree temp;
2316 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2317 /* In simple cases we can look through PHI nodes,
2318 but we have to be careful with loops.
2319 See PR49073. */
2320 if (! dom_info_available_p (CDI_DOMINATORS)
2321 || gimple_bb (def_stmt) == gimple_bb (stmt)
2322 || dominated_by_p (CDI_DOMINATORS,
2323 gimple_bb (def_stmt),
2324 gimple_bb (stmt)))
2325 return NULL_TREE;
2326 temp = and_var_with_comparison (arg, invert, code2,
2327 op2a, op2b);
2328 if (!temp)
2329 return NULL_TREE;
2330 else if (!result)
2331 result = temp;
2332 else if (!same_bool_result_p (result, temp))
2333 return NULL_TREE;
2335 else
2336 return NULL_TREE;
2338 return result;
2341 default:
2342 break;
2345 return NULL_TREE;
2348 /* Try to simplify the AND of two comparisons, specified by
2349 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2350 If this can be simplified to a single expression (without requiring
2351 introducing more SSA variables to hold intermediate values),
2352 return the resulting tree. Otherwise return NULL_TREE.
2353 If the result expression is non-null, it has boolean type. */
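/* Usage sketch (illustrative; a and b are hypothetical operands):
     t = maybe_fold_and_comparisons (LE_EXPR, a, b, GE_EXPR, a, b);
   folds (a <= b) AND (a >= b) to a == b via combine_comparisons.  */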
2355 tree
2356 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2357 enum tree_code code2, tree op2a, tree op2b)
2359 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2360 if (t)
2361 return t;
2362 else
2363 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2366 /* Helper function for or_comparisons_1: try to simplify the OR of the
2367 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2368 If INVERT is true, invert the value of VAR before doing the OR.
2369 Return NULL_TREE if we can't simplify this to a single expression. */
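/* Illustrative example (not from the original source): with INVERT set
   and (op2a code2 op2b) == (x < y), this computes
   !var OR (x < y) => !(var AND (x >= y)) and again relies on
   canonicalize_bool to undo the inversion.  */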
2371 static tree
2372 or_var_with_comparison (tree var, bool invert,
2373 enum tree_code code2, tree op2a, tree op2b)
2375 tree t;
2376 gimple stmt = SSA_NAME_DEF_STMT (var);
2378 /* We can only deal with variables whose definitions are assignments. */
2379 if (!is_gimple_assign (stmt))
2380 return NULL_TREE;
2382 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2383 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2384 Then we only have to consider the simpler non-inverted cases. */
2385 if (invert)
2386 t = and_var_with_comparison_1 (stmt,
2387 invert_tree_comparison (code2, false),
2388 op2a, op2b);
2389 else
2390 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2391 return canonicalize_bool (t, invert);
2394 /* Try to simplify the OR of the ssa variable defined by the assignment
2395 STMT with the comparison specified by (OP2A CODE2 OP2B).
2396 Return NULL_TREE if we can't simplify this to a single expression. */
2398 static tree
2399 or_var_with_comparison_1 (gimple stmt,
2400 enum tree_code code2, tree op2a, tree op2b)
2402 tree var = gimple_assign_lhs (stmt);
2403 tree true_test_var = NULL_TREE;
2404 tree false_test_var = NULL_TREE;
2405 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2407 /* Check for identities like (var OR (var != 0)) => true. */
2408 if (TREE_CODE (op2a) == SSA_NAME
2409 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2411 if ((code2 == NE_EXPR && integer_zerop (op2b))
2412 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2414 true_test_var = op2a;
2415 if (var == true_test_var)
2416 return var;
2418 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2419 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2421 false_test_var = op2a;
2422 if (var == false_test_var)
2423 return boolean_true_node;
2427 /* If the definition is a comparison, recurse on it. */
2428 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2430 tree t = or_comparisons_1 (innercode,
2431 gimple_assign_rhs1 (stmt),
2432 gimple_assign_rhs2 (stmt),
2433 code2,
2434 op2a,
2435 op2b);
2436 if (t)
2437 return t;
2440 /* If the definition is an AND or OR expression, we may be able to
2441 simplify by reassociating. */
2442 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2443 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2445 tree inner1 = gimple_assign_rhs1 (stmt);
2446 tree inner2 = gimple_assign_rhs2 (stmt);
2447 gimple s;
2448 tree t;
2449 tree partial = NULL_TREE;
2450 bool is_or = (innercode == BIT_IOR_EXPR);
2452 /* Check for boolean identities that don't require recursive examination
2453 of inner1/inner2:
2454 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2455 inner1 OR (inner1 AND inner2) => inner1
2456 !inner1 OR (inner1 OR inner2) => true
2457 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2 */
2459 if (inner1 == true_test_var)
2460 return (is_or ? var : inner1);
2461 else if (inner2 == true_test_var)
2462 return (is_or ? var : inner2);
2463 else if (inner1 == false_test_var)
2464 return (is_or
2465 ? boolean_true_node
2466 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2467 else if (inner2 == false_test_var)
2468 return (is_or
2469 ? boolean_true_node
2470 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2472 /* Next, redistribute/reassociate the OR across the inner tests.
2473 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2474 if (TREE_CODE (inner1) == SSA_NAME
2475 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2476 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2477 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2478 gimple_assign_rhs1 (s),
2479 gimple_assign_rhs2 (s),
2480 code2, op2a, op2b)))
2482 /* Handle the OR case, where we are reassociating:
2483 (inner1 OR inner2) OR (op2a code2 op2b)
2484 => (t OR inner2)
2485 If the partial result t is a constant, we win. Otherwise
2486 continue on to try reassociating with the other inner test. */
2487 if (is_or)
2489 if (integer_onep (t))
2490 return boolean_true_node;
2491 else if (integer_zerop (t))
2492 return inner2;
2495 /* Handle the AND case, where we are redistributing:
2496 (inner1 AND inner2) OR (op2a code2 op2b)
2497 => (t AND (inner2 OR (op2a code op2b))) */
2498 else if (integer_zerop (t))
2499 return boolean_false_node;
2501 /* Save partial result for later. */
2502 partial = t;
2505 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2506 if (TREE_CODE (inner2) == SSA_NAME
2507 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2508 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2509 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2510 gimple_assign_rhs1 (s),
2511 gimple_assign_rhs2 (s),
2512 code2, op2a, op2b)))
2514 /* Handle the OR case, where we are reassociating:
2515 (inner1 OR inner2) OR (op2a code2 op2b)
2516 => (inner1 OR t)
2517 => (t OR partial) */
2518 if (is_or)
2520 if (integer_zerop (t))
2521 return inner1;
2522 else if (integer_onep (t))
2523 return boolean_true_node;
2524 /* If both are the same, we can apply the identity
2525 (x OR x) == x. */
2526 else if (partial && same_bool_result_p (t, partial))
2527 return t;
2530 /* Handle the AND case, where we are redistributing:
2531 (inner1 AND inner2) OR (op2a code2 op2b)
2532 => (t AND (inner1 OR (op2a code2 op2b)))
2533 => (t AND partial) */
2534 else
2536 if (integer_zerop (t))
2537 return boolean_false_node;
2538 else if (partial)
2540 /* We already got a simplification for the other
2541 operand to the redistributed AND expression. The
2542 interesting case is when at least one is true.
2543 Or, if both are the same, we can apply the identity
2544 (x AND x) == x. */
2545 if (integer_onep (partial))
2546 return t;
2547 else if (integer_onep (t))
2548 return partial;
2549 else if (same_bool_result_p (t, partial))
2550 return t;
2555 return NULL_TREE;
2558 /* Try to simplify the OR of two comparisons defined by
2559 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2560 If this can be done without constructing an intermediate value,
2561 return the resulting tree; otherwise NULL_TREE is returned.
2562 This function is deliberately asymmetric as it recurses on SSA_DEFs
2563 in the first comparison but not the second. */
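/* Illustrative examples (not from the original source; x is an integer
   SSA name, 3 and 5 are INTEGER_CSTs):
   or_comparisons_1 (LT_EXPR, x, 3, LT_EXPR, x, 5) yields x < 5
   (the less restrictive bound), while
   or_comparisons_1 (LT_EXPR, x, 5, GT_EXPR, x, 3) yields
   boolean_true_node, since together the two ranges cover all values.  */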
2565 static tree
2566 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2567 enum tree_code code2, tree op2a, tree op2b)
2569 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2570 if (operand_equal_p (op1a, op2a, 0)
2571 && operand_equal_p (op1b, op2b, 0))
2573 /* Result will be either NULL_TREE, or a combined comparison. */
2574 tree t = combine_comparisons (UNKNOWN_LOCATION,
2575 TRUTH_ORIF_EXPR, code1, code2,
2576 boolean_type_node, op1a, op1b);
2577 if (t)
2578 return t;
2581 /* Likewise the swapped case of the above. */
2582 if (operand_equal_p (op1a, op2b, 0)
2583 && operand_equal_p (op1b, op2a, 0))
2585 /* Result will be either NULL_TREE, or a combined comparison. */
2586 tree t = combine_comparisons (UNKNOWN_LOCATION,
2587 TRUTH_ORIF_EXPR, code1,
2588 swap_tree_comparison (code2),
2589 boolean_type_node, op1a, op1b);
2590 if (t)
2591 return t;
2594 /* If both comparisons are of the same value against constants, we might
2595 be able to merge them. */
2596 if (operand_equal_p (op1a, op2a, 0)
2597 && TREE_CODE (op1b) == INTEGER_CST
2598 && TREE_CODE (op2b) == INTEGER_CST)
2600 int cmp = tree_int_cst_compare (op1b, op2b);
2602 /* If we have (op1a != op1b), we should either be able to
2603 return that or TRUE, depending on whether the constant op1b
2604 also satisfies the other comparison against op2b. */
2605 if (code1 == NE_EXPR)
2607 bool done = true;
2608 bool val;
2609 switch (code2)
2611 case EQ_EXPR: val = (cmp == 0); break;
2612 case NE_EXPR: val = (cmp != 0); break;
2613 case LT_EXPR: val = (cmp < 0); break;
2614 case GT_EXPR: val = (cmp > 0); break;
2615 case LE_EXPR: val = (cmp <= 0); break;
2616 case GE_EXPR: val = (cmp >= 0); break;
2617 default: done = false;
2619 if (done)
2621 if (val)
2622 return boolean_true_node;
2623 else
2624 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2627 /* Likewise if the second comparison is a != comparison. */
2628 else if (code2 == NE_EXPR)
2630 bool done = true;
2631 bool val;
2632 switch (code1)
2634 case EQ_EXPR: val = (cmp == 0); break;
2635 case NE_EXPR: val = (cmp != 0); break;
2636 case LT_EXPR: val = (cmp > 0); break;
2637 case GT_EXPR: val = (cmp < 0); break;
2638 case LE_EXPR: val = (cmp >= 0); break;
2639 case GE_EXPR: val = (cmp <= 0); break;
2640 default: done = false;
2642 if (done)
2644 if (val)
2645 return boolean_true_node;
2646 else
2647 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2651 /* See if an equality test is redundant with the other comparison. */
2652 else if (code1 == EQ_EXPR)
2654 bool val;
2655 switch (code2)
2657 case EQ_EXPR: val = (cmp == 0); break;
2658 case NE_EXPR: val = (cmp != 0); break;
2659 case LT_EXPR: val = (cmp < 0); break;
2660 case GT_EXPR: val = (cmp > 0); break;
2661 case LE_EXPR: val = (cmp <= 0); break;
2662 case GE_EXPR: val = (cmp >= 0); break;
2663 default:
2664 val = false;
2666 if (val)
2667 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2669 else if (code2 == EQ_EXPR)
2671 bool val;
2672 switch (code1)
2674 case EQ_EXPR: val = (cmp == 0); break;
2675 case NE_EXPR: val = (cmp != 0); break;
2676 case LT_EXPR: val = (cmp > 0); break;
2677 case GT_EXPR: val = (cmp < 0); break;
2678 case LE_EXPR: val = (cmp >= 0); break;
2679 case GE_EXPR: val = (cmp <= 0); break;
2680 default:
2681 val = false;
2683 if (val)
2684 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2687 /* Choose the less restrictive of two < or <= comparisons. */
2688 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2689 && (code2 == LT_EXPR || code2 == LE_EXPR))
2691 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2692 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2693 else
2694 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2697 /* Likewise choose the less restrictive of two > or >= comparisons. */
2698 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2699 && (code2 == GT_EXPR || code2 == GE_EXPR))
2701 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2702 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2703 else
2704 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2707 /* Check for singleton ranges. */
2708 else if (cmp == 0
2709 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2710 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2711 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2713 /* Check for less/greater pairs that don't restrict the range at all. */
2714 else if (cmp >= 0
2715 && (code1 == LT_EXPR || code1 == LE_EXPR)
2716 && (code2 == GT_EXPR || code2 == GE_EXPR))
2717 return boolean_true_node;
2718 else if (cmp <= 0
2719 && (code1 == GT_EXPR || code1 == GE_EXPR)
2720 && (code2 == LT_EXPR || code2 == LE_EXPR))
2721 return boolean_true_node;
2724 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2725 NAME's definition is a truth value. See if there are any simplifications
2726 that can be done against the NAME's definition. */
2727 if (TREE_CODE (op1a) == SSA_NAME
2728 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2729 && (integer_zerop (op1b) || integer_onep (op1b)))
2731 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2732 || (code1 == NE_EXPR && integer_onep (op1b)));
2733 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2734 switch (gimple_code (stmt))
2736 case GIMPLE_ASSIGN:
2737 /* Try to simplify by copy-propagating the definition. */
2738 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2740 case GIMPLE_PHI:
2741 /* If every argument to the PHI produces the same result when
2742 ORed with the second comparison, we win.
2743 Do not do this unless the type is bool since we need a bool
2744 result here anyway. */
2745 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2747 tree result = NULL_TREE;
2748 unsigned i;
2749 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2751 tree arg = gimple_phi_arg_def (stmt, i);
2753 /* If this PHI has itself as an argument, ignore it.
2754 If all the other args produce the same result,
2755 we're still OK. */
2756 if (arg == gimple_phi_result (stmt))
2757 continue;
2758 else if (TREE_CODE (arg) == INTEGER_CST)
2760 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2762 if (!result)
2763 result = boolean_true_node;
2764 else if (!integer_onep (result))
2765 return NULL_TREE;
2767 else if (!result)
2768 result = fold_build2 (code2, boolean_type_node,
2769 op2a, op2b);
2770 else if (!same_bool_comparison_p (result,
2771 code2, op2a, op2b))
2772 return NULL_TREE;
2774 else if (TREE_CODE (arg) == SSA_NAME
2775 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2777 tree temp;
2778 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2779 /* In simple cases we can look through PHI nodes,
2780 but we have to be careful with loops.
2781 See PR49073. */
2782 if (! dom_info_available_p (CDI_DOMINATORS)
2783 || gimple_bb (def_stmt) == gimple_bb (stmt)
2784 || dominated_by_p (CDI_DOMINATORS,
2785 gimple_bb (def_stmt),
2786 gimple_bb (stmt)))
2787 return NULL_TREE;
2788 temp = or_var_with_comparison (arg, invert, code2,
2789 op2a, op2b);
2790 if (!temp)
2791 return NULL_TREE;
2792 else if (!result)
2793 result = temp;
2794 else if (!same_bool_result_p (result, temp))
2795 return NULL_TREE;
2797 else
2798 return NULL_TREE;
2800 return result;
2803 default:
2804 break;
2807 return NULL_TREE;
2810 /* Try to simplify the OR of two comparisons, specified by
2811 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2812 If this can be simplified to a single expression (without requiring
2813 introducing more SSA variables to hold intermediate values),
2814 return the resulting tree. Otherwise return NULL_TREE.
2815 If the result expression is non-null, it has boolean type. */
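/* Illustrative note (not from the original source): or_comparisons_1
   follows SSA definitions only through its first comparison, so the
   wrapper below retries with the operands swapped; e.g. with integer x,
   (x > 3) OR (t_1 != 0) where t_1 = x <= 3 simplifies to true only on
   the swapped attempt.  */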
2817 tree
2818 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2819 enum tree_code code2, tree op2a, tree op2b)
2821 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2822 if (t)
2823 return t;
2824 else
2825 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2829 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2831 Either NULL_TREE, a simplified but non-constant expression, or a
2832 constant is returned.
2834 ??? This should go into a gimple-fold-inline.h file to be eventually
2835 privatized with the single valueize function used in the various TUs
2836 to avoid the indirect function call overhead. */
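/* Illustrative sketch of a VALUEIZE callback (hypothetical, not part
   of this file):

     static tree
     identity_valueize (tree name)
     {
       return name;
     }

   Passing identity_valueize folds only statements whose operands are
   already constant; a pass like CCP instead supplies a callback that
   maps each SSA name to its known lattice constant, if any.  */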
2838 tree
2839 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2841 location_t loc = gimple_location (stmt);
2842 switch (gimple_code (stmt))
2844 case GIMPLE_ASSIGN:
2846 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2848 switch (get_gimple_rhs_class (subcode))
2850 case GIMPLE_SINGLE_RHS:
2852 tree rhs = gimple_assign_rhs1 (stmt);
2853 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2855 if (TREE_CODE (rhs) == SSA_NAME)
2857 /* If the RHS is an SSA_NAME, return its known constant value,
2858 if any. */
2859 return (*valueize) (rhs);
2861 /* Handle propagating invariant addresses into address
2862 operations. */
2863 else if (TREE_CODE (rhs) == ADDR_EXPR
2864 && !is_gimple_min_invariant (rhs))
2866 HOST_WIDE_INT offset;
2867 tree base;
2868 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2869 &offset,
2870 valueize);
2871 if (base
2872 && (CONSTANT_CLASS_P (base)
2873 || decl_address_invariant_p (base)))
2874 return build_invariant_address (TREE_TYPE (rhs),
2875 base, offset);
2877 else if (TREE_CODE (rhs) == CONSTRUCTOR
2878 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2879 && (CONSTRUCTOR_NELTS (rhs)
2880 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2882 unsigned i;
2883 tree val, list;
2885 list = NULL_TREE;
2886 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2888 val = (*valueize) (val);
2889 if (TREE_CODE (val) == INTEGER_CST
2890 || TREE_CODE (val) == REAL_CST
2891 || TREE_CODE (val) == FIXED_CST)
2892 list = tree_cons (NULL_TREE, val, list);
2893 else
2894 return NULL_TREE;
2897 return build_vector (TREE_TYPE (rhs), nreverse (list));
2900 if (kind == tcc_reference)
2902 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2903 || TREE_CODE (rhs) == REALPART_EXPR
2904 || TREE_CODE (rhs) == IMAGPART_EXPR)
2905 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2907 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2908 return fold_unary_loc (EXPR_LOCATION (rhs),
2909 TREE_CODE (rhs),
2910 TREE_TYPE (rhs), val);
2912 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2913 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2915 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2916 return fold_ternary_loc (EXPR_LOCATION (rhs),
2917 TREE_CODE (rhs),
2918 TREE_TYPE (rhs), val,
2919 TREE_OPERAND (rhs, 1),
2920 TREE_OPERAND (rhs, 2));
2922 else if (TREE_CODE (rhs) == MEM_REF
2923 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2925 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2926 if (TREE_CODE (val) == ADDR_EXPR
2927 && is_gimple_min_invariant (val))
2929 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2930 unshare_expr (val),
2931 TREE_OPERAND (rhs, 1));
2932 if (tem)
2933 rhs = tem;
2936 return fold_const_aggregate_ref_1 (rhs, valueize);
2938 else if (kind == tcc_declaration)
2939 return get_symbol_constant_value (rhs);
2940 return rhs;
2943 case GIMPLE_UNARY_RHS:
2945 /* Handle unary operators that can appear in GIMPLE form.
2946 Note that we know the single operand must be a constant,
2947 so this should almost always return a simplified RHS. */
2948 tree lhs = gimple_assign_lhs (stmt);
2949 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2951 /* Conversions are useless for CCP purposes if they are
2952 value-preserving. Thus the restrictions that
2953 useless_type_conversion_p places for pointer type conversions
2954 do not apply here. Substitution later will only substitute to
2955 allowed places. */
2956 if (CONVERT_EXPR_CODE_P (subcode)
2957 && POINTER_TYPE_P (TREE_TYPE (lhs))
2958 && POINTER_TYPE_P (TREE_TYPE (op0)))
2960 tree tem;
2961 /* Try to re-construct array references on-the-fly. */
2962 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2963 TREE_TYPE (op0))
2964 && ((tem = maybe_fold_offset_to_address
2965 (loc,
2966 op0, integer_zero_node, TREE_TYPE (lhs)))
2967 != NULL_TREE))
2968 return tem;
2969 return op0;
2972 return
2973 fold_unary_ignore_overflow_loc (loc, subcode,
2974 gimple_expr_type (stmt), op0);
2977 case GIMPLE_BINARY_RHS:
2979 /* Handle binary operators that can appear in GIMPLE form. */
2980 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2981 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2983 /* Translate &x + CST into an invariant form suitable for
2984 further propagation. */
2985 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2986 && TREE_CODE (op0) == ADDR_EXPR
2987 && TREE_CODE (op1) == INTEGER_CST)
2989 tree off = fold_convert (ptr_type_node, op1);
2990 return build_fold_addr_expr
2991 (fold_build2 (MEM_REF,
2992 TREE_TYPE (TREE_TYPE (op0)),
2993 unshare_expr (op0), off));
2996 return fold_binary_loc (loc, subcode,
2997 gimple_expr_type (stmt), op0, op1);
3000 case GIMPLE_TERNARY_RHS:
3002 /* Handle ternary operators that can appear in GIMPLE form. */
3003 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
3004 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
3005 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
3007 return fold_ternary_loc (loc, subcode,
3008 gimple_expr_type (stmt), op0, op1, op2);
3011 default:
3012 gcc_unreachable ();
3016 case GIMPLE_CALL:
3018 tree fn;
3020 if (gimple_call_internal_p (stmt))
3021 /* No folding yet for these functions. */
3022 return NULL_TREE;
3024 fn = (*valueize) (gimple_call_fn (stmt));
3025 if (TREE_CODE (fn) == ADDR_EXPR
3026 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3027 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
3029 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
3030 tree call, retval;
3031 unsigned i;
3032 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3033 args[i] = (*valueize) (gimple_call_arg (stmt, i));
3034 call = build_call_array_loc (loc,
3035 gimple_call_return_type (stmt),
3036 fn, gimple_call_num_args (stmt), args);
3037 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
3038 if (retval)
3039 /* fold_call_expr wraps the result inside a NOP_EXPR. */
3040 STRIP_NOPS (retval);
3041 return retval;
3043 return NULL_TREE;
3046 default:
3047 return NULL_TREE;
3051 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
3052 Returns NULL_TREE if folding to a constant is not possible, otherwise
3053 returns a constant according to is_gimple_min_invariant. */
3055 tree
3056 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
3058 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
3059 if (res && is_gimple_min_invariant (res))
3060 return res;
3061 return NULL_TREE;
3065 /* The following set of functions folds references using
3066 their constant initializers. */
3068 static tree fold_ctor_reference (tree type, tree ctor,
3069 unsigned HOST_WIDE_INT offset,
3070 unsigned HOST_WIDE_INT size);
3072 /* See if we can find the constructor defining the value of BASE.
3073 When the constructor is reached at a constant offset (such as when
3074 BASE is array[40] and we know the constructor of the array), then
3075 BIT_OFFSET is adjusted accordingly.
3077 As a special case, return error_mark_node when the constructor
3078 is not explicitly available but is known to be zero,
3079 as for 'static const int a;'. */
3080 static tree
3081 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
3082 tree (*valueize)(tree))
3084 HOST_WIDE_INT bit_offset2, size, max_size;
3085 if (TREE_CODE (base) == MEM_REF)
3087 if (!integer_zerop (TREE_OPERAND (base, 1)))
3089 if (!host_integerp (TREE_OPERAND (base, 1), 0))
3090 return NULL_TREE;
3091 *bit_offset += (mem_ref_offset (base).low
3092 * BITS_PER_UNIT);
3095 if (valueize
3096 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
3097 base = valueize (TREE_OPERAND (base, 0));
3098 if (!base || TREE_CODE (base) != ADDR_EXPR)
3099 return NULL_TREE;
3100 base = TREE_OPERAND (base, 0);
3103 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
3104 DECL_INITIAL. If BASE is a nested reference into another
3105 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
3106 the inner reference. */
3107 switch (TREE_CODE (base))
3109 case VAR_DECL:
3110 if (!const_value_known_p (base))
3111 return NULL_TREE;
3113 /* Fallthru. */
3114 case CONST_DECL:
3115 if (!DECL_INITIAL (base)
3116 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
3117 return error_mark_node;
3118 return DECL_INITIAL (base);
3120 case ARRAY_REF:
3121 case COMPONENT_REF:
3122 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
3123 if (max_size == -1 || size != max_size)
3124 return NULL_TREE;
3125 *bit_offset += bit_offset2;
3126 return get_base_constructor (base, bit_offset, valueize);
3128 case STRING_CST:
3129 case CONSTRUCTOR:
3130 return base;
3132 default:
3133 return NULL_TREE;
3137 /* CTOR is a STRING_CST. Fold a reference of type TYPE and size SIZE
3138 to the memory at bit OFFSET.
3140 We only do the simple job of folding byte accesses. */
3142 static tree
3143 fold_string_cst_ctor_reference (tree type, tree ctor,
3144 unsigned HOST_WIDE_INT offset,
3145 unsigned HOST_WIDE_INT size)
3147 if (INTEGRAL_TYPE_P (type)
3148 && (TYPE_MODE (type)
3149 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
3150 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
3151 == MODE_INT)
3152 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
3153 && size == BITS_PER_UNIT
3154 && !(offset % BITS_PER_UNIT))
3156 offset /= BITS_PER_UNIT;
3157 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
3158 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
3159 [offset]));
3160 /* Folding
3161 const char a[20]="hello";
3162 return a[10];
3164 might lead to offset greater than string length. In this case we
3165 know value is either initialized to 0 or out of bounds. Return 0
3166 in both cases. */
3167 return build_zero_cst (type);
3169 return NULL_TREE;
3172 /* CTOR is a CONSTRUCTOR of an array type. Fold a reference of type TYPE
3173 and size SIZE to the memory at bit OFFSET. */
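/* Illustrative example (not from the original source): for
     static const int a[2] = {10, 20};
   a read of a[1] as a 32-bit int reaches this function with
   OFFSET == 32 and SIZE == 32 (assuming 8-bit units and a 4-byte int),
   selects access_index 1, and folds to 20.  */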
3175 static tree
3176 fold_array_ctor_reference (tree type, tree ctor,
3177 unsigned HOST_WIDE_INT offset,
3178 unsigned HOST_WIDE_INT size)
3180 unsigned HOST_WIDE_INT cnt;
3181 tree cfield, cval;
3182 double_int low_bound, elt_size;
3183 double_int index, max_index;
3184 double_int access_index;
3185 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
3186 HOST_WIDE_INT inner_offset;
3188 /* Compute low bound and elt size. */
3189 if (domain_type && TYPE_MIN_VALUE (domain_type))
3191 /* Static constructors for variably sized objects make no sense. */
3192 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
3193 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
3195 else
3196 low_bound = double_int_zero;
3197 /* Static constructors for variably sized objects make no sense. */
3198 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
3199 == INTEGER_CST);
3200 elt_size =
3201 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
3204 /* We can handle only constantly sized accesses that are known to not
3205 be larger than the size of the array element. */
3206 if (!TYPE_SIZE_UNIT (type)
3207 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
3208 || double_int_cmp (elt_size,
3209 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
3210 return NULL_TREE;
3212 /* Compute the array index we look for. */
3213 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
3214 elt_size, TRUNC_DIV_EXPR);
3215 access_index = double_int_add (access_index, low_bound);
3217 /* And offset within the access. */
3218 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
3220 /* See if the array field is large enough to span the whole access. We do not
3221 care to fold accesses spanning multiple array indexes. */
3222 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
3223 return NULL_TREE;
3225 index = double_int_sub (low_bound, double_int_one);
3226 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
3228 /* An array constructor might explicitly set the index, specify a range,
3229 or leave the index NULL, meaning that it is the next index after the
3230 previous one. */
3231 if (cfield)
3233 if (TREE_CODE (cfield) == INTEGER_CST)
3234 max_index = index = tree_to_double_int (cfield);
3235 else
3237 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
3238 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
3239 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
3242 else
3243 max_index = index = double_int_add (index, double_int_one);
3245 /* Do we have a match? */
3246 if (double_int_cmp (access_index, index, 1) >= 0
3247 && double_int_cmp (access_index, max_index, 1) <= 0)
3248 return fold_ctor_reference (type, cval, inner_offset, size);
3250 /* When memory is not explicitly mentioned in the constructor,
3251 it is 0 (or out of range). */
3252 return build_zero_cst (type);
3255 /* CTOR is CONSTRUCTOR of an aggregate or vector.
3256 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
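/* Illustrative example (not from the original source): for
     static const struct { int x; int y; } s = {1, 2};
   a load of s.y arrives with OFFSET equal to y's bit position
   (32 with a 4-byte int) and SIZE == 32, overlaps only the second
   field, and folds to 2.  */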
3258 static tree
3259 fold_nonarray_ctor_reference (tree type, tree ctor,
3260 unsigned HOST_WIDE_INT offset,
3261 unsigned HOST_WIDE_INT size)
3263 unsigned HOST_WIDE_INT cnt;
3264 tree cfield, cval;
3266 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
3267 cval)
3269 tree byte_offset = DECL_FIELD_OFFSET (cfield);
3270 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
3271 tree field_size = DECL_SIZE (cfield);
3272 double_int bitoffset;
3273 double_int byte_offset_cst = tree_to_double_int (byte_offset);
3274 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
3275 double_int bitoffset_end, access_end;
3277 /* Variable sized objects in static constructors make no sense,
3278 but field_size can be NULL for flexible array members. */
3279 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
3280 && TREE_CODE (byte_offset) == INTEGER_CST
3281 && (field_size != NULL_TREE
3282 ? TREE_CODE (field_size) == INTEGER_CST
3283 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
3285 /* Compute bit offset of the field. */
3286 bitoffset = double_int_add (tree_to_double_int (field_offset),
3287 double_int_mul (byte_offset_cst,
3288 bits_per_unit_cst));
3289 /* Compute bit offset where the field ends. */
3290 if (field_size != NULL_TREE)
3291 bitoffset_end = double_int_add (bitoffset,
3292 tree_to_double_int (field_size));
3293 else
3294 bitoffset_end = double_int_zero;
3296 access_end = double_int_add (uhwi_to_double_int (offset),
3297 uhwi_to_double_int (size));
3299 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
3300 [BITOFFSET, BITOFFSET_END)? */
3301 if (double_int_cmp (access_end, bitoffset, 0) > 0
3302 && (field_size == NULL_TREE
3303 || double_int_cmp (uhwi_to_double_int (offset),
3304 bitoffset_end, 0) < 0))
3306 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
3307 bitoffset);
3308 /* We do have overlap. Now see if field is large enough to
3309 cover the access. Give up for accesses spanning multiple
3310 fields. */
3311 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
3312 return NULL_TREE;
3313 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
3314 return NULL_TREE;
3315 return fold_ctor_reference (type, cval,
3316 double_int_to_uhwi (inner_offset), size);
3319 /* When memory is not explicitly mentioned in the constructor, it is 0. */
3320 return build_zero_cst (type);
3323 /* CTOR is the value initializing the memory. Fold a reference of type TYPE
3324 and size SIZE to the memory at bit OFFSET. */
3326 static tree
3327 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
3328 unsigned HOST_WIDE_INT size)
3330 tree ret;
3332 /* We found the field with an exact match. */
3333 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
3334 && !offset)
3335 return canonicalize_constructor_val (ctor);
3337 /* We are at the end of the walk; see if we can view-convert the
3338 result. */
3339 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
3340 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3341 && operand_equal_p (TYPE_SIZE (type),
3342 TYPE_SIZE (TREE_TYPE (ctor)), 0))
3344 ret = canonicalize_constructor_val (ctor);
3345 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
3346 if (ret)
3347 STRIP_NOPS (ret);
3348 return ret;
3350 if (TREE_CODE (ctor) == STRING_CST)
3351 return fold_string_cst_ctor_reference (type, ctor, offset, size);
3352 if (TREE_CODE (ctor) == CONSTRUCTOR)
3355 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
3356 return fold_array_ctor_reference (type, ctor, offset, size);
3357 else
3358 return fold_nonarray_ctor_reference (type, ctor, offset, size);
3361 return NULL_TREE;
3364 /* Return the tree representing the element referenced by T if T is an
3365 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
3366 names using VALUEIZE. Return NULL_TREE otherwise. */
3368 tree
3369 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
3371 tree ctor, idx, base;
3372 HOST_WIDE_INT offset, size, max_size;
3373 tree tem;
3375 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3376 return get_symbol_constant_value (t);
3378 tem = fold_read_from_constant_string (t);
3379 if (tem)
3380 return tem;
3382 switch (TREE_CODE (t))
3384 case ARRAY_REF:
3385 case ARRAY_RANGE_REF:
3386 /* Constant indexes are handled well by get_base_constructor.
3387 We only need to special-case variable offsets.
3388 FIXME: This code can't handle nested references with variable indexes
3389 (they will be handled only by iteration of ccp). Perhaps we can bring
3390 get_ref_base_and_extent here and make it use a valueize callback. */
3391 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3392 && valueize
3393 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3394 && host_integerp (idx, 0))
3396 tree low_bound, unit_size;
3398 /* If the resulting bit-offset is constant, track it. */
3399 if ((low_bound = array_ref_low_bound (t),
3400 host_integerp (low_bound, 0))
3401 && (unit_size = array_ref_element_size (t),
3402 host_integerp (unit_size, 1)))
3404 offset = TREE_INT_CST_LOW (idx);
3405 offset -= TREE_INT_CST_LOW (low_bound);
3406 offset *= TREE_INT_CST_LOW (unit_size);
3407 offset *= BITS_PER_UNIT;
3409 base = TREE_OPERAND (t, 0);
3410 ctor = get_base_constructor (base, &offset, valueize);
3411 /* Empty constructor. Always fold to 0. */
3412 if (ctor == error_mark_node)
3413 return build_zero_cst (TREE_TYPE (t));
3414 /* Out-of-bounds array access. The value is undefined,
3415 but don't fold. */
3416 if (offset < 0)
3417 return NULL_TREE;
3418 /* We cannot determine the ctor. */
3419 if (!ctor)
3420 return NULL_TREE;
3421 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3422 TREE_INT_CST_LOW (unit_size)
3423 * BITS_PER_UNIT);
3426 /* Fallthru. */
3428 case COMPONENT_REF:
3429 case BIT_FIELD_REF:
3430 case TARGET_MEM_REF:
3431 case MEM_REF:
3432 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3433 ctor = get_base_constructor (base, &offset, valueize);
3435 /* Empty constructor. Always fold to 0. */
3436 if (ctor == error_mark_node)
3437 return build_zero_cst (TREE_TYPE (t));
3438 /* We do not know the precise address. */
3439 if (max_size == -1 || max_size != size)
3440 return NULL_TREE;
3441 /* We cannot determine the ctor. */
3442 if (!ctor)
3443 return NULL_TREE;
3445 /* Out-of-bounds array access. The value is undefined, but don't fold. */
3446 if (offset < 0)
3447 return NULL_TREE;
3449 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
3451 case REALPART_EXPR:
3452 case IMAGPART_EXPR:
3454 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3455 if (c && TREE_CODE (c) == COMPLEX_CST)
3456 return fold_build1_loc (EXPR_LOCATION (t),
3457 TREE_CODE (t), TREE_TYPE (t), c);
3458 break;
3461 default:
3462 break;
3465 return NULL_TREE;
3468 tree
3469 fold_const_aggregate_ref (tree t)
3471 return fold_const_aggregate_ref_1 (t, NULL);
3474 /* Return true iff VAL is a gimple expression that is known to be
3475 non-negative. Restricted to floating-point inputs. */
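/* Illustrative examples (not from the original source): for
   v_2 = x_1 * x_1 this returns true (a real squared is nonnegative),
   and for v_3 = __builtin_pow (y_1, 2.0) it returns true because the
   exponent is an even integer-valued real.  */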
3477 bool
3478 gimple_val_nonnegative_real_p (tree val)
3480 gimple def_stmt;
3482 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3484 /* Use existing logic for non-gimple trees. */
3485 if (tree_expr_nonnegative_p (val))
3486 return true;
3488 if (TREE_CODE (val) != SSA_NAME)
3489 return false;
3491 /* Currently we look only at the immediately defining statement
3492 to make this determination, since recursion on defining
3493 statements of operands can lead to quadratic behavior in the
3494 worst case. This is expected to catch almost all occurrences
3495 in practice. It would be possible to implement limited-depth
3496 recursion if important cases are lost. Alternatively, passes
3497 that need this information (such as the pow/powi lowering code
3498 in the cse_sincos pass) could be revised to provide it through
3499 dataflow propagation. */
3501 def_stmt = SSA_NAME_DEF_STMT (val);
3503 if (is_gimple_assign (def_stmt))
3505 tree op0, op1;
3507 /* See fold-const.c:tree_expr_nonnegative_p for additional
3508 cases that could be handled with recursion. */
3510 switch (gimple_assign_rhs_code (def_stmt))
3512 case ABS_EXPR:
3513 /* Always true for floating-point operands. */
3514 return true;
3516 case MULT_EXPR:
3517 /* True if the two operands are identical (since we are
3518 restricted to floating-point inputs). */
3519 op0 = gimple_assign_rhs1 (def_stmt);
3520 op1 = gimple_assign_rhs2 (def_stmt);
3522 if (op0 == op1
3523 || operand_equal_p (op0, op1, 0))
3524 return true;
3526 default:
3527 return false;
3530 else if (is_gimple_call (def_stmt))
3532 tree fndecl = gimple_call_fndecl (def_stmt);
3533 if (fndecl
3534 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3536 tree arg1;
3538 switch (DECL_FUNCTION_CODE (fndecl))
3540 CASE_FLT_FN (BUILT_IN_ACOS):
3541 CASE_FLT_FN (BUILT_IN_ACOSH):
3542 CASE_FLT_FN (BUILT_IN_CABS):
3543 CASE_FLT_FN (BUILT_IN_COSH):
3544 CASE_FLT_FN (BUILT_IN_ERFC):
3545 CASE_FLT_FN (BUILT_IN_EXP):
3546 CASE_FLT_FN (BUILT_IN_EXP10):
3547 CASE_FLT_FN (BUILT_IN_EXP2):
3548 CASE_FLT_FN (BUILT_IN_FABS):
3549 CASE_FLT_FN (BUILT_IN_FDIM):
3550 CASE_FLT_FN (BUILT_IN_HYPOT):
3551 CASE_FLT_FN (BUILT_IN_POW10):
3552 return true;
3554 CASE_FLT_FN (BUILT_IN_SQRT):
3555 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3556 negative inputs. */
3557 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3558 return true;
3560 break;
3562 CASE_FLT_FN (BUILT_IN_POWI):
3563 /* True if the second argument is an even integer. */
3564 arg1 = gimple_call_arg (def_stmt, 1);
3566 if (TREE_CODE (arg1) == INTEGER_CST
3567 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3568 return true;
3570 break;
3572 CASE_FLT_FN (BUILT_IN_POW):
3573 /* True if the second argument is an even integer-valued
3574 real. */
3575 arg1 = gimple_call_arg (def_stmt, 1);
3577 if (TREE_CODE (arg1) == REAL_CST)
3579 REAL_VALUE_TYPE c;
3580 HOST_WIDE_INT n;
3582 c = TREE_REAL_CST (arg1);
3583 n = real_to_integer (&c);
3585 if ((n & 1) == 0)
3587 REAL_VALUE_TYPE cint;
3588 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3589 if (real_identical (&c, &cint))
3590 return true;
3594 break;
3596 default:
3597 return false;
3602 return false;