/* Statement simplification on GIMPLE.
   Copyright (C) 2010, 2011 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "target.h"
#include "gimple-fold.h"

/* Return true when DECL can be referenced from the current unit.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to another compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
	In this case we have a static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred to by external vtables that
	we devirtualize only during the final compilation stage.
	At this time we have already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;

  if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
    return true;
  /* External flag is set, so we deal with a C++ reference
     to a static object from another file.  */
  if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
      && TREE_CODE (decl) == VAR_DECL)
    {
      /* Just be sure it is not a bug in the frontend setting
	 flags incorrectly.  Those variables should never
	 be finalized.  */
      gcc_checking_assert (!(vnode = varpool_get_node (decl))
			   || !vnode->finalized);
      return false;
    }
  /* When a function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also when still gimplifying all referred comdat functions will be
     produced.
     ??? As observed in PR20991 for already optimized out comdat virtual
     functions we may not necessarily give up, because the copy will be
     output elsewhere when the corresponding vtable is output.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;
  /* If we already output the function body, we are safe.  */
  if (TREE_ASM_WRITTEN (decl))
    return true;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have the function body and that we haven't yet
	 taken the decision to eliminate the offline copy of the function.
	 The second is important when devirtualization happens during the
	 final compilation stage and making a new reference would no longer
	 cause the callee to be compiled.  */
      if (!node || !node->analyzed || node->global.inlined_to)
	return false;
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->finalized)
	return false;
    }
  return true;
}

/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into a form acceptable to is_gimple_min_invariant.  */

tree
canonicalize_constructor_val (tree cval)
{
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR)
    {
      tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval),
					     TREE_OPERAND (cval, 0),
					     TREE_OPERAND (cval, 1),
					     TREE_TYPE (cval));
      if (t)
	cval = t;
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (cval, 0));

      if (base
	  && (TREE_CODE (base) == VAR_DECL
	      || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base))
	return NULL_TREE;
      if (cfun && base && TREE_CODE (base) == VAR_DECL)
	add_referenced_var (base);
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
    }
  return cval;
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

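/* As a hypothetical illustration: given 'static const int x = 42;',
   a load of x folds to the constant 42, and a 'const' scalar declared
   without an initializer that cannot be overridden at link or run time
   folds to zero.  */
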
tree
get_symbol_constant_value (tree sym)
{
  if (const_value_known_p (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  val = canonicalize_constructor_val (val);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}

/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */

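/* E.g. (an illustrative sketch, SSA names hypothetical) propagating the
   address in 'p_1 = &x; ... = MEM[p_1]' lets the dereference cancel
   against the ADDR_EXPR, leaving the plain reference 'x'.  */
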
bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (TREE_CODE (deref) == MEM_REF
	      && TREE_CODE (addr) == ADDR_EXPR);

  /* Don't propagate if ADDR's operand has incomplete type.  */
  if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
    return false;

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
	  || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
				      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
				     TREE_TYPE (addr))
	  && useless_type_conversion_p (TREE_TYPE (deref),
					TREE_TYPE (TREE_OPERAND (addr, 0))));
}

/* A subroutine of fold_stmt.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.

   LOC is the location of the original expression.  */

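/* A hypothetical worked example: for 'int a[10]' with 4-byte int,
   BASE == a and OFFSET == 8 fold to the reference a[2].  An offset of 6
   is rejected because it is not an exact multiple of the element
   size.  */
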
static tree
maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of the element type) from that
     ARRAY_REF.  We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = ssizetype;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (!elt_size)
    return NULL_TREE;
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
	 to sign-extend the (possibly pointer) offset here
	 and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
				 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
				   soffset.low, soffset.high,
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
	min_idx = TYPE_MIN_VALUE (idx_type);
      else
	min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct an access past the array bounds.  For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.
     This is only an issue for multi-dimensional arrays.  */
  if (TREE_CODE (elt_type) == ARRAY_TYPE
      && domain_type)
    {
      if (TYPE_MAX_VALUE (domain_type)
	  && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
	  && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
	return NULL_TREE;
      else if (TYPE_MIN_VALUE (domain_type)
	       && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
	       && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
	return NULL_TREE;
      else if (compare_tree_int (idx, 0) < 0)
	return NULL_TREE;
    }

  {
    tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
    SET_EXPR_LOCATION (t, loc);
    return t;
  }
}

/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
   LOC is the location of the original expression.

   Before attempting the conversion strip off existing ADDR_EXPRs.  */

tree
maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
				tree orig_type)
{
  tree ret;

  STRIP_NOPS (base);
  if (TREE_CODE (base) != ADDR_EXPR)
    return NULL_TREE;

  base = TREE_OPERAND (base, 0);
  if (types_compatible_p (orig_type, TREE_TYPE (base))
      && integer_zerop (offset))
    return base;

  ret = maybe_fold_offset_to_array_ref (loc, base, offset);
  if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
    return ret;
  return NULL_TREE;
}

/* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
   LOC is the location of the original expression.  */

tree
maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
			      tree orig_type)
{
  tree base, ret;

  STRIP_NOPS (addr);
  if (TREE_CODE (addr) != ADDR_EXPR)
    return NULL_TREE;
  base = TREE_OPERAND (addr, 0);
  ret = maybe_fold_offset_to_array_ref (loc, base, offset);
  if (ret)
    {
      ret = build_fold_addr_expr (ret);
      if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
	return NULL_TREE;
      SET_EXPR_LOCATION (ret, loc);
    }

  return ret;
}

/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */

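/* A hypothetical example of the transform: with 'int a[10]' and 4-byte
   int, the POINTER_PLUS_EXPR '(int *) &a p+ 8' becomes '&a[2]'.  */
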
tree
maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    {
      /* Or op0 should now be A[0] and the non-constant offset defined
	 via a multiplication by the array element size.  */
      if (TREE_CODE (op0) == ARRAY_REF
	  /* As we will end up creating a variable index array access
	     in the outermost array dimension make sure there isn't
	     a more inner array that the index could overflow to.  */
	  && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
	  && integer_zerop (TREE_OPERAND (op0, 1))
	  && TREE_CODE (op1) == SSA_NAME)
	{
	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
	  tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
	  if (!host_integerp (elsz, 1)
	      || !is_gimple_assign (offset_def))
	    return NULL_TREE;

	  /* Do not build array references of something that we can't
	     see the true number of array dimensions for.  */
	  if (!DECL_P (TREE_OPERAND (op0, 0))
	      && !handled_component_p (TREE_OPERAND (op0, 0)))
	    return NULL_TREE;

	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (op0),
				   TREE_OPERAND (op0, 0),
				   gimple_assign_rhs1 (offset_def),
				   TREE_OPERAND (op0, 2),
				   TREE_OPERAND (op0, 3)));
	  else if (integer_onep (elsz)
		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (op0),
				   TREE_OPERAND (op0, 0),
				   op1,
				   TREE_OPERAND (op0, 2),
				   TREE_OPERAND (op0, 3)));
	}
      else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
	       /* Ditto.  */
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
	       && TREE_CODE (op1) == SSA_NAME)
	{
	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
	  tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
	  if (!host_integerp (elsz, 1)
	      || !is_gimple_assign (offset_def))
	    return NULL_TREE;

	  /* Do not build array references of something that we can't
	     see the true number of array dimensions for.  */
	  if (!DECL_P (op0)
	      && !handled_component_p (op0))
	    return NULL_TREE;

	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
				   op0, gimple_assign_rhs1 (offset_def),
				   integer_zero_node, NULL_TREE));
	  else if (integer_onep (elsz)
		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
				   op0, op1,
				   integer_zero_node, NULL_TREE));
	}

      return NULL_TREE;
    }

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
			     array_idx, op1);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (loc, op0, op1);
  if (t)
    {
      t = build_fold_addr_expr (t);
      if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
	return NULL_TREE;
      SET_EXPR_LOCATION (t, loc);
    }

  return t;
}

/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

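/* Typical simplifications here (sketched on hypothetical operands):
   MEM[&a, 0] of a compatible type folds back to the plain reference 'a',
   and a load from a constant aggregate is replaced by its known value
   via fold_const_aggregate_ref.  */
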
static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize a MEM_REF's invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}

/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Try to fold a conditional expression.  */
	if (TREE_CODE (rhs) == COND_EXPR)
	  {
	    tree op0 = COND_EXPR_COND (rhs);
	    tree tem;
	    bool set = false;
	    location_t cond_loc = EXPR_LOCATION (rhs);

	    if (COMPARISON_CLASS_P (op0))
	      {
		fold_defer_overflow_warnings ();
		tem = fold_binary_loc (cond_loc,
				       TREE_CODE (op0), TREE_TYPE (op0),
				       TREE_OPERAND (op0, 0),
				       TREE_OPERAND (op0, 1));
		/* This is actually a conditional expression, not a GIMPLE
		   conditional statement, however, the valid_gimple_rhs_p
		   test still applies.  */
		set = (tem && is_gimple_condexpr (tem)
		       && valid_gimple_rhs_p (tem));
		fold_undefer_overflow_warnings (set, stmt, 0);
	      }
	    else if (is_gimple_min_invariant (op0))
	      {
		tem = op0;
		set = true;
	      }
	    else
	      return NULL_TREE;

	    if (set)
	      result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs),
					tem, COND_EXPR_THEN (rhs),
					COND_EXPR_ELSE (rhs));
	  }

	else if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return unshare_expr (get_symbol_constant_value (rhs));

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
	else if (CONVERT_EXPR_CODE_P (subcode)
		 && POINTER_TYPE_P (gimple_expr_type (stmt))
		 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
	  {
	    tree type = gimple_expr_type (stmt);
	    tree t = maybe_fold_offset_to_address (loc,
						   gimple_assign_rhs1 (stmt),
						   integer_zero_node, type);
	    if (t)
	      return t;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	{
	  tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
	    {
	      type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
	      if (!useless_type_conversion_p
		    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
		type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	    }
	  result = maybe_fold_stmt_addition (gimple_location (stmt),
					     type,
					     gimple_assign_rhs1 (stmt),
					     gimple_assign_rhs2 (stmt));
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;

	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
	     we lose canonicalization opportunities.  Do not go again
	     through fold here though, or the same non-GIMPLE will be
	     produced.  */
	  if (commutative_tree_code (subcode)
	      && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				       gimple_assign_rhs2 (stmt), false))
	    return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
			   gimple_assign_rhs2 (stmt),
			   gimple_assign_rhs1 (stmt));
	}
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;

	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
	     we lose canonicalization opportunities.  Do not go again
	     through fold here though, or the same non-GIMPLE will be
	     produced.  */
	  if (commutative_ternary_tree_code (subcode)
	      && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				       gimple_assign_rhs2 (stmt), false))
	    return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
			   gimple_assign_rhs2 (stmt),
			   gimple_assign_rhs1 (stmt),
			   gimple_assign_rhs3 (stmt));
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}

/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

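/* For instance (a hypothetical sketch), when the folder rewrites

     memcpy (&a, &b, 4);	with VUSE .MEM_3 and VDEF .MEM_4

   into the assignment 'a = b', the new store keeps VUSE .MEM_3 and is
   given VDEF .MEM_4, so downstream statements observe the same virtual
   operand chain as before the replacement.  */
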
void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE;  /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;
  gimple last = NULL;
  gimple laststore = NULL;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);
  reaching_vuse = gimple_vuse (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_remove (si_p, true);
	  return;
	}
    }
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      if (last)
	{
	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
	  gsi_next (si_p);
	}
      new_stmt = gsi_stmt (i);
      if (gimple_in_ssa_p (cfun))
	{
	  find_new_referenced_vars (new_stmt);
	  mark_symbols_for_renaming (new_stmt);
	}
      /* If the new statement has a VUSE, update it with the exact SSA name we
	 know will reach this one.  */
      if (gimple_vuse (new_stmt))
	{
	  /* If we've also seen a previous store create a new VDEF for
	     the latter one, and make that the new reaching VUSE.  */
	  if (laststore)
	    {
	      reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
	      gimple_set_vdef (laststore, reaching_vuse);
	      update_stmt (laststore);
	      laststore = NULL;
	    }
	  gimple_set_vuse (new_stmt, reaching_vuse);
	  gimple_set_modified (new_stmt, true);
	}
      if (gimple_assign_single_p (new_stmt)
	  && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	{
	  laststore = new_stmt;
	}
      last = new_stmt;
    }

  if (lhs == NULL_TREE)
    {
      /* If we replace a call without LHS that has a VDEF and our new
	 sequence ends with a store we must make that store have the same
	 vdef in order not to break the sequencing.  This can happen
	 for instance when folding memcpy calls into assignments.  */
      if (gimple_vdef (stmt) && laststore)
	{
	  gimple_set_vdef (laststore, gimple_vdef (stmt));
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
	  update_stmt (laststore);
	}
      else if (gimple_in_ssa_p (cfun))
	{
	  unlink_stmt_vdef (stmt);
	  release_defs (stmt);
	}
      new_stmt = last;
    }
  else
    {
      if (last)
	{
	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
	  gsi_next (si_p);
	}
      if (laststore && is_gimple_reg (lhs))
	{
	  gimple_set_vdef (laststore, gimple_vdef (stmt));
	  update_stmt (laststore);
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
	  laststore = NULL;
	}
      else if (laststore)
	{
	  reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
	  gimple_set_vdef (laststore, reaching_vuse);
	  update_stmt (laststore);
	  laststore = NULL;
	}
      new_stmt = gimple_build_assign (lhs, tmp);
      if (!is_gimple_reg (tmp))
	gimple_set_vuse (new_stmt, reaching_vuse);
      if (!is_gimple_reg (lhs))
	{
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
	}
      else if (reaching_vuse == gimple_vuse (stmt))
	unlink_stmt_vdef (stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}

/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

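/* A worked (hypothetical) illustration: for arg_2 = PHI <"foo", "x">,
   TYPE == 0 fails because the two strings have different lengths, while
   TYPE == 1 succeeds and records the maximum length 3 in LENGTH.  */
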
static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
	return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
	       && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited,
				     type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	       && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_maxval_strlen (rhs, length, visited, type);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }
	}
	return true;

      default:
	return false;
    }
}

/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

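/* E.g. (hypothetically) 'strcpy (dst, src)' where get_maxval_strlen
   proves strlen (src) == 3 may be rewritten as 'memcpy (dst, src, 4)',
   copying the terminating NUL as well.  */
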
tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	      fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}

/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  If a call to the function must be accompanied
   by a this adjustment, the constant which should be added to the this
   pointer is stored to *DELTA.  If REFUSE_THUNKS is true, return NULL if the
   function is a thunk (other than a this adjustment which is dealt with by
   DELTA).  */

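/* The BINFO_VIRTUALS walk below advances by one entry per token, or by
   TARGET_VTABLE_USES_DESCRIPTORS entries on targets that use vtable
   descriptors, so TOKEN indexes virtual methods rather than raw vtable
   slots.  */
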
tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  tree *delta, bool refuse_thunks)
{
  HOST_WIDE_INT i;
  tree v, fndecl;
  struct cgraph_node *node;

  v = BINFO_VIRTUALS (known_binfo);
  /* If there are no virtual methods leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;
  i = 0;
  while (i != token)
    {
      i += (TARGET_VTABLE_USES_DESCRIPTORS
	    ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
      v = TREE_CHAIN (v);
    }

  /* If BV_VCALL_INDEX is non-NULL, give up.  */
  if (TREE_TYPE (v))
    return NULL_TREE;

  fndecl = TREE_VALUE (v);
  node = cgraph_get_node_or_alias (fndecl);
  if (refuse_thunks
      && (!node
	  /* Bail out if it is a thunk declaration.  Since simple
	     this_adjusting thunks are represented by a constant in
	     TREE_PURPOSE of items in BINFO_VIRTUALS, this is a more
	     complicated type which we cannot handle as yet.

	     FIXME: Remove the following condition once we are able to
	     represent thunk information on call graph edges.  */
	  || (node->same_body_alias && node->thunk.thunk_p)))
    return NULL_TREE;

  /* When the cgraph node is missing and the function is not public, we
     cannot devirtualize.  This can happen in WHOPR when the actual method
     ends up in another partition, because we found the devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (TREE_VALUE (v)))
    return NULL_TREE;

  *delta = TREE_PURPOSE (v);
  gcc_checking_assert (host_integerp (*delta, 0));
  return fndecl;
}

/* Generate code adjusting the first parameter of a call statement determined
   by GSI by DELTA.  */

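/* I.e. a call 'foo (this_1, ...)' becomes (SSA names hypothetical)

     tmp_2 = this_1 p+ delta;
     foo (tmp_2, ...);  */
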
void
gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
{
  gimple call_stmt = gsi_stmt (*gsi);
  tree parm, tmp;
  gimple new_stmt;

  delta = fold_convert (sizetype, delta);
  gcc_assert (gimple_call_num_args (call_stmt) >= 1);
  parm = gimple_call_arg (call_stmt, 0);
  gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
  tmp = create_tmp_var (TREE_TYPE (parm), NULL);
  add_referenced_var (tmp);

  tmp = make_ssa_name (tmp, NULL);
  new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm,
					   delta);
  SSA_NAME_DEF_STMT (tmp) = new_stmt;
  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
  gimple_call_set_arg (call_stmt, 0, tmp);
}

/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors).  */

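/* E.g. for a hypothetical 'static struct B b;' where B is a record type
   with no bases, the ADDR_EXPR '&b' yields TYPE_BINFO of B, allowing a
   later virtual call through &b to be devirtualized.  */
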
tree
gimple_extract_devirt_binfo_from_cst (tree cst)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, expected_type, binfo;
  bool last_artificial = false;

  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  expected_type = TREE_TYPE (cst);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it
     is an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
	break;
      if (offset < 0)
	return NULL_TREE;

      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL)
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_low_cst (DECL_SIZE (fld), 1);
	  if (pos <= offset && (pos + size) > offset)
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      offset -= pos;
    }
  /* Artificial sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;
  else
    return binfo;
}

/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  callee = gimple_call_fndecl (stmt);
  if (!inplace && callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);

      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  return true;
	}
    }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      tree binfo, fndecl, delta, obj;
      HOST_WIDE_INT token;

      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  return true;
	}

      obj = OBJ_TYPE_REF_OBJECT (callee);
      binfo = gimple_extract_devirt_binfo_from_cst (obj);
      if (!binfo)
	return false;
      token = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
      fndecl = gimple_get_virt_method_for_binfo (token, binfo, &delta, false);
      if (!fndecl)
	return false;
      gcc_assert (integer_zerop (delta));
      gimple_call_set_fndecl (stmt, fndecl);
      return true;
    }

  return false;
}

/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes the two cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;
  gimple_stmt_iterator gsinext = *gsi;
  gimple next_stmt;

  gsi_next (&gsinext);
  next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	tree lhs = gimple_assign_lhs (stmt);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      /* Fold *& in call arguments.  */
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
	  {
	    tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	    if (tmp)
	      {
		gimple_call_set_arg (stmt, i, tmp);
		changed = true;
	      }
	  }
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	{
	  tree link = gimple_asm_output_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, true)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	{
	  tree link = gimple_asm_input_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, false)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  /* If stmt folds into nothing and it was the last stmt in a bb,
     don't call gsi_stmt.  */
  if (gsi_end_p (*gsi))
    {
      gcc_assert (next_stmt == NULL);
      return changed;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  Don't do this if stmt folded into nothing,
     as we'd be changing the next stmt.  */
  if (gimple_has_lhs (stmt) && stmt != next_stmt)
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}

/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement STMT should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  bool changed = fold_stmt_1 (&gsi, true);
  gcc_assert (gsi_stmt (gsi) == stmt);
  return changed;
}

/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

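/* E.g. with INVERT set, a comparison 'a_1 < b_2' canonicalizes to
   'a_1 >= b_2' and an SSA name x_3 to 'x_3 == 0', both of
   boolean_type_node (SSA names hypothetical).  */
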
static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  else if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}

/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}

/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}

/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);

1886 /* Helper function for and_comparisons_1: try to simplify the AND of the
1887 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1888 If INVERT is true, invert the value of VAR before doing the AND.
1889 Return NULL_TREE if we can't simplify this to a single expression. */
1891 static tree
1892 and_var_with_comparison (tree var, bool invert,
1893 enum tree_code code2, tree op2a, tree op2b)
1895 tree t;
1896 gimple stmt = SSA_NAME_DEF_STMT (var);
1898 /* We can only deal with variables whose definitions are assignments. */
1899 if (!is_gimple_assign (stmt))
1900 return NULL_TREE;
1902 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1903 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1904 Then we only have to consider the simpler non-inverted cases. */
1905 if (invert)
1906 t = or_var_with_comparison_1 (stmt,
1907 invert_tree_comparison (code2, false),
1908 op2a, op2b);
1909 else
1910 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1911 return canonicalize_bool (t, invert);
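/* For example (hypothetical names): simplifying
     !x_1 AND (a_2 == b_3)
   asks or_var_with_comparison_1 about x_1 OR (a_2 != b_3), and
   canonicalize_bool then re-applies the negation to whatever that
   returns.  */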
1914 /* Try to simplify the AND of the ssa variable defined by the assignment
1915 STMT with the comparison specified by (OP2A CODE2 OP2B).
1916 Return NULL_TREE if we can't simplify this to a single expression. */
1918 static tree
1919 and_var_with_comparison_1 (gimple stmt,
1920 enum tree_code code2, tree op2a, tree op2b)
1922 tree var = gimple_assign_lhs (stmt);
1923 tree true_test_var = NULL_TREE;
1924 tree false_test_var = NULL_TREE;
1925 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1927 /* Check for identities like (var AND (var == 0)) => false. */
1928 if (TREE_CODE (op2a) == SSA_NAME
1929 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1931 if ((code2 == NE_EXPR && integer_zerop (op2b))
1932 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1934 true_test_var = op2a;
1935 if (var == true_test_var)
1936 return var;
1938 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1939 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1941 false_test_var = op2a;
1942 if (var == false_test_var)
1943 return boolean_false_node;
1947 /* If the definition is a comparison, recurse on it. */
1948 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1950 tree t = and_comparisons_1 (innercode,
1951 gimple_assign_rhs1 (stmt),
1952 gimple_assign_rhs2 (stmt),
1953 code2,
1954 op2a,
1955 op2b);
1956 if (t)
1957 return t;
1960 /* If the definition is an AND or OR expression, we may be able to
1961 simplify by reassociating. */
1962 if (innercode == TRUTH_AND_EXPR
1963 || innercode == TRUTH_OR_EXPR
1964 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1965 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
1967 tree inner1 = gimple_assign_rhs1 (stmt);
1968 tree inner2 = gimple_assign_rhs2 (stmt);
1969 gimple s;
1970 tree t;
1971 tree partial = NULL_TREE;
1972 bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);
1974 /* Check for boolean identities that don't require recursive examination
1975 of inner1/inner2:
1976 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1977 inner1 AND (inner1 OR inner2) => inner1
1978 !inner1 AND (inner1 AND inner2) => false
1979 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1980 Likewise for similar cases involving inner2. */
1981 if (inner1 == true_test_var)
1982 return (is_and ? var : inner1);
1983 else if (inner2 == true_test_var)
1984 return (is_and ? var : inner2);
1985 else if (inner1 == false_test_var)
1986 return (is_and
1987 ? boolean_false_node
1988 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1989 else if (inner2 == false_test_var)
1990 return (is_and
1991 ? boolean_false_node
1992 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1994 /* Next, redistribute/reassociate the AND across the inner tests.
1995 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1996 if (TREE_CODE (inner1) == SSA_NAME
1997 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1998 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1999 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
2000 gimple_assign_rhs1 (s),
2001 gimple_assign_rhs2 (s),
2002 code2, op2a, op2b)))
2004 /* Handle the AND case, where we are reassociating:
2005 (inner1 AND inner2) AND (op2a code2 op2b)
2006 => (t AND inner2)
2007 If the partial result t is a constant, we win. Otherwise
2008 continue on to try reassociating with the other inner test. */
2009 if (is_and)
2011 if (integer_onep (t))
2012 return inner2;
2013 else if (integer_zerop (t))
2014 return boolean_false_node;
2017 /* Handle the OR case, where we are redistributing:
2018 (inner1 OR inner2) AND (op2a code2 op2b)
2019 => (t OR (inner2 AND (op2a code2 op2b))) */
2020 else if (integer_onep (t))
2021 return boolean_true_node;
2023 /* Save partial result for later. */
2024 partial = t;
2027 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
2028 if (TREE_CODE (inner2) == SSA_NAME
2029 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2030 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2031 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
2032 gimple_assign_rhs1 (s),
2033 gimple_assign_rhs2 (s),
2034 code2, op2a, op2b)))
2036 /* Handle the AND case, where we are reassociating:
2037 (inner1 AND inner2) AND (op2a code2 op2b)
2038 => (inner1 AND t) */
2039 if (is_and)
2041 if (integer_onep (t))
2042 return inner1;
2043 else if (integer_zerop (t))
2044 return boolean_false_node;
2045 /* If both are the same, we can apply the identity
2046 (x AND x) == x. */
2047 else if (partial && same_bool_result_p (t, partial))
2048 return t;
2051 /* Handle the OR case, where we are redistributing:
2052 (inner1 OR inner2) AND (op2a code2 op2b)
2053 => (t OR (inner1 AND (op2a code2 op2b)))
2054 => (t OR partial) */
2055 else
2057 if (integer_onep (t))
2058 return boolean_true_node;
2059 else if (partial)
2061 /* We already got a simplification for the other
2062 operand to the redistributed OR expression. The
2063 interesting case is when at least one is false.
2064 Or, if both are the same, we can apply the identity
2065 (x OR x) == x. */
2066 if (integer_zerop (partial))
2067 return t;
2068 else if (integer_zerop (t))
2069 return partial;
2070 else if (same_bool_result_p (t, partial))
2071 return t;
2076 return NULL_TREE;
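/* A concrete instance of the reassociation above (hypothetical
   names): if var = inner1 AND inner2 with inner1 defined as
   x_1 < 5, then var AND (x_1 > 7) folds to false, because the
   first partial result (x_1 < 5) AND (x_1 > 7) is already
   boolean_false_node.  */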
2079 /* Try to simplify the AND of two comparisons defined by
2080 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2081 If this can be done without constructing an intermediate value,
2082 return the resulting tree; otherwise NULL_TREE is returned.
2083 This function is deliberately asymmetric as it recurses on SSA_DEFs
2084 in the first comparison but not the second. */
2086 static tree
2087 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2088 enum tree_code code2, tree op2a, tree op2b)
2090 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
2091 if (operand_equal_p (op1a, op2a, 0)
2092 && operand_equal_p (op1b, op2b, 0))
2094 tree t = combine_comparisons (UNKNOWN_LOCATION,
2095 TRUTH_ANDIF_EXPR, code1, code2,
2096 boolean_type_node, op1a, op1b);
2097 if (t)
2098 return t;
2101 /* Likewise the swapped case of the above. */
2102 if (operand_equal_p (op1a, op2b, 0)
2103 && operand_equal_p (op1b, op2a, 0))
2105 tree t = combine_comparisons (UNKNOWN_LOCATION,
2106 TRUTH_ANDIF_EXPR, code1,
2107 swap_tree_comparison (code2),
2108 boolean_type_node, op1a, op1b);
2109 if (t)
2110 return t;
2113 /* If both comparisons are of the same value against constants, we might
2114 be able to merge them. */
2115 if (operand_equal_p (op1a, op2a, 0)
2116 && TREE_CODE (op1b) == INTEGER_CST
2117 && TREE_CODE (op2b) == INTEGER_CST)
2119 int cmp = tree_int_cst_compare (op1b, op2b);
2121 /* If we have (op1a == op1b), we should either be able to
2122 return that or FALSE, depending on whether the constant op1b
2123 also satisfies the other comparison against op2b. */
2124 if (code1 == EQ_EXPR)
2126 bool done = true;
2127 bool val;
2128 switch (code2)
2130 case EQ_EXPR: val = (cmp == 0); break;
2131 case NE_EXPR: val = (cmp != 0); break;
2132 case LT_EXPR: val = (cmp < 0); break;
2133 case GT_EXPR: val = (cmp > 0); break;
2134 case LE_EXPR: val = (cmp <= 0); break;
2135 case GE_EXPR: val = (cmp >= 0); break;
2136 default: done = false;
2138 if (done)
2140 if (val)
2141 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2142 else
2143 return boolean_false_node;
2146 /* Likewise if the second comparison is an == comparison. */
2147 else if (code2 == EQ_EXPR)
2149 bool done = true;
2150 bool val;
2151 switch (code1)
2153 case EQ_EXPR: val = (cmp == 0); break;
2154 case NE_EXPR: val = (cmp != 0); break;
2155 case LT_EXPR: val = (cmp > 0); break;
2156 case GT_EXPR: val = (cmp < 0); break;
2157 case LE_EXPR: val = (cmp >= 0); break;
2158 case GE_EXPR: val = (cmp <= 0); break;
2159 default: done = false;
2161 if (done)
2163 if (val)
2164 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2165 else
2166 return boolean_false_node;
2170 /* Same business with inequality tests. */
2171 else if (code1 == NE_EXPR)
2173 bool val;
2174 switch (code2)
2176 case EQ_EXPR: val = (cmp != 0); break;
2177 case NE_EXPR: val = (cmp == 0); break;
2178 case LT_EXPR: val = (cmp >= 0); break;
2179 case GT_EXPR: val = (cmp <= 0); break;
2180 case LE_EXPR: val = (cmp > 0); break;
2181 case GE_EXPR: val = (cmp < 0); break;
2182 default:
2183 val = false;
2185 if (val)
2186 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2188 else if (code2 == NE_EXPR)
2190 bool val;
2191 switch (code1)
2193 case EQ_EXPR: val = (cmp == 0); break;
2194 case NE_EXPR: val = (cmp != 0); break;
2195 case LT_EXPR: val = (cmp <= 0); break;
2196 case GT_EXPR: val = (cmp >= 0); break;
2197 case LE_EXPR: val = (cmp < 0); break;
2198 case GE_EXPR: val = (cmp > 0); break;
2199 default:
2200 val = false;
2202 if (val)
2203 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2206 /* Choose the more restrictive of two < or <= comparisons. */
2207 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2208 && (code2 == LT_EXPR || code2 == LE_EXPR))
2210 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2211 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2212 else
2213 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2216 /* Likewise choose the more restrictive of two > or >= comparisons. */
2217 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2218 && (code2 == GT_EXPR || code2 == GE_EXPR))
2220 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2221 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2222 else
2223 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2226 /* Check for singleton ranges. */
2227 else if (cmp == 0
2228 && ((code1 == LE_EXPR && code2 == GE_EXPR)
2229 || (code1 == GE_EXPR && code2 == LE_EXPR)))
2230 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
2232 /* Check for disjoint ranges. */
2233 else if (cmp <= 0
2234 && (code1 == LT_EXPR || code1 == LE_EXPR)
2235 && (code2 == GT_EXPR || code2 == GE_EXPR))
2236 return boolean_false_node;
2237 else if (cmp >= 0
2238 && (code1 == GT_EXPR || code1 == GE_EXPR)
2239 && (code2 == LT_EXPR || code2 == LE_EXPR))
2240 return boolean_false_node;
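/* Worked examples of the constant merging above, with x an
   integer SSA name:
     (x == 5) AND (x < 7)   => x == 5   (5 also satisfies x < 7)
     (x == 5) AND (x > 7)   => false
     (x < 5)  AND (x <= 7)  => x < 5    (more restrictive bound)
     (x <= 5) AND (x >= 5)  => x == 5   (singleton range)
     (x < 5)  AND (x > 7)   => false    (disjoint ranges)  */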
2243 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2244 NAME's definition is a truth value. See if there are any simplifications
2245 that can be done against the NAME's definition. */
2246 if (TREE_CODE (op1a) == SSA_NAME
2247 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2248 && (integer_zerop (op1b) || integer_onep (op1b)))
2250 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2251 || (code1 == NE_EXPR && integer_onep (op1b)));
2252 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2253 switch (gimple_code (stmt))
2255 case GIMPLE_ASSIGN:
2256 /* Try to simplify by copy-propagating the definition. */
2257 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
2259 case GIMPLE_PHI:
2260 /* If every argument to the PHI produces the same result when
2261 ANDed with the second comparison, we win.
2262 Do not do this unless the type is bool since we need a bool
2263 result here anyway. */
2264 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2266 tree result = NULL_TREE;
2267 unsigned i;
2268 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2270 tree arg = gimple_phi_arg_def (stmt, i);
2272 /* If this PHI has itself as an argument, ignore it.
2273 If all the other args produce the same result,
2274 we're still OK. */
2275 if (arg == gimple_phi_result (stmt))
2276 continue;
2277 else if (TREE_CODE (arg) == INTEGER_CST)
2279 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
2281 if (!result)
2282 result = boolean_false_node;
2283 else if (!integer_zerop (result))
2284 return NULL_TREE;
2286 else if (!result)
2287 result = fold_build2 (code2, boolean_type_node,
2288 op2a, op2b);
2289 else if (!same_bool_comparison_p (result,
2290 code2, op2a, op2b))
2291 return NULL_TREE;
2293 else if (TREE_CODE (arg) == SSA_NAME
2294 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2296 tree temp;
2297 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2298 /* In simple cases we can look through PHI nodes,
2299 but we have to be careful with loops.
2300 See PR49073. */
2301 if (! dom_info_available_p (CDI_DOMINATORS)
2302 || gimple_bb (def_stmt) == gimple_bb (stmt)
2303 || dominated_by_p (CDI_DOMINATORS,
2304 gimple_bb (def_stmt),
2305 gimple_bb (stmt)))
2306 return NULL_TREE;
2307 temp = and_var_with_comparison (arg, invert, code2,
2308 op2a, op2b);
2309 if (!temp)
2310 return NULL_TREE;
2311 else if (!result)
2312 result = temp;
2313 else if (!same_bool_result_p (result, temp))
2314 return NULL_TREE;
2316 else
2317 return NULL_TREE;
2319 return result;
2322 default:
2323 break;
2326 return NULL_TREE;
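/* E.g. (hypothetical names) if op1a is x_4 = PHI <0, 0>, then
   (x_4 != 0) AND (a_1 < b_2) folds to false: every constant PHI
   argument yields boolean_false_node under the AND, so the PHI
   result does too.  */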
2329 /* Try to simplify the AND of two comparisons, specified by
2330 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2331 If this can be simplified to a single expression (without needing
2332 to introduce more SSA variables to hold intermediate values),
2333 return the resulting tree. Otherwise return NULL_TREE.
2334 If the result expression is non-null, it has boolean type. */
2336 tree
2337 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2338 enum tree_code code2, tree op2a, tree op2b)
2340 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2341 if (t)
2342 return t;
2343 else
2344 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
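/* Both argument orders are tried because and_comparisons_1 only
   follows SSA definitions hanging off its first comparison.  A
   minimal usage sketch (assuming x, five and seven are existing
   trees for an integer SSA name and the constants 5 and 7):

     tree t = maybe_fold_and_comparisons (LT_EXPR, x, five,
                                          GT_EXPR, x, seven);

   Here t comes back as boolean_false_node and can be used in place
   of the conjunction; a NULL_TREE result means no simplification
   was found.  */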
2347 /* Helper function for or_comparisons_1: try to simplify the OR of the
2348 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2349 If INVERT is true, invert the value of VAR before doing the OR.
2350 Return NULL_TREE if we can't simplify this to a single expression. */
2352 static tree
2353 or_var_with_comparison (tree var, bool invert,
2354 enum tree_code code2, tree op2a, tree op2b)
2356 tree t;
2357 gimple stmt = SSA_NAME_DEF_STMT (var);
2359 /* We can only deal with variables whose definitions are assignments. */
2360 if (!is_gimple_assign (stmt))
2361 return NULL_TREE;
2363 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2364 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2365 Then we only have to consider the simpler non-inverted cases. */
2366 if (invert)
2367 t = and_var_with_comparison_1 (stmt,
2368 invert_tree_comparison (code2, false),
2369 op2a, op2b);
2370 else
2371 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2372 return canonicalize_bool (t, invert);
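/* For example (hypothetical names): simplifying
     !x_1 OR (a_2 == b_3)
   asks and_var_with_comparison_1 about x_1 AND (a_2 != b_3) and
   lets canonicalize_bool undo the negation afterwards.  */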
2375 /* Try to simplify the OR of the ssa variable defined by the assignment
2376 STMT with the comparison specified by (OP2A CODE2 OP2B).
2377 Return NULL_TREE if we can't simplify this to a single expression. */
2379 static tree
2380 or_var_with_comparison_1 (gimple stmt,
2381 enum tree_code code2, tree op2a, tree op2b)
2383 tree var = gimple_assign_lhs (stmt);
2384 tree true_test_var = NULL_TREE;
2385 tree false_test_var = NULL_TREE;
2386 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2388 /* Check for identities like (var OR (var != 0)) => true. */
2389 if (TREE_CODE (op2a) == SSA_NAME
2390 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2392 if ((code2 == NE_EXPR && integer_zerop (op2b))
2393 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2395 true_test_var = op2a;
2396 if (var == true_test_var)
2397 return var;
2399 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2400 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2402 false_test_var = op2a;
2403 if (var == false_test_var)
2404 return boolean_true_node;
2408 /* If the definition is a comparison, recurse on it. */
2409 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2411 tree t = or_comparisons_1 (innercode,
2412 gimple_assign_rhs1 (stmt),
2413 gimple_assign_rhs2 (stmt),
2414 code2,
2415 op2a,
2416 op2b);
2417 if (t)
2418 return t;
2421 /* If the definition is an AND or OR expression, we may be able to
2422 simplify by reassociating. */
2423 if (innercode == TRUTH_AND_EXPR
2424 || innercode == TRUTH_OR_EXPR
2425 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2426 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
2428 tree inner1 = gimple_assign_rhs1 (stmt);
2429 tree inner2 = gimple_assign_rhs2 (stmt);
2430 gimple s;
2431 tree t;
2432 tree partial = NULL_TREE;
2433 bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
2435 /* Check for boolean identities that don't require recursive examination
2436 of inner1/inner2:
2437 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2438 inner1 OR (inner1 AND inner2) => inner1
2439 !inner1 OR (inner1 OR inner2) => true
2440 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2441 Likewise for similar cases involving inner2. */
2442 if (inner1 == true_test_var)
2443 return (is_or ? var : inner1);
2444 else if (inner2 == true_test_var)
2445 return (is_or ? var : inner2);
2446 else if (inner1 == false_test_var)
2447 return (is_or
2448 ? boolean_true_node
2449 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2450 else if (inner2 == false_test_var)
2451 return (is_or
2452 ? boolean_true_node
2453 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2455 /* Next, redistribute/reassociate the OR across the inner tests.
2456 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2457 if (TREE_CODE (inner1) == SSA_NAME
2458 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2459 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2460 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2461 gimple_assign_rhs1 (s),
2462 gimple_assign_rhs2 (s),
2463 code2, op2a, op2b)))
2465 /* Handle the OR case, where we are reassociating:
2466 (inner1 OR inner2) OR (op2a code2 op2b)
2467 => (t OR inner2)
2468 If the partial result t is a constant, we win. Otherwise
2469 continue on to try reassociating with the other inner test. */
2470 if (is_or)
2472 if (integer_onep (t))
2473 return boolean_true_node;
2474 else if (integer_zerop (t))
2475 return inner2;
2478 /* Handle the AND case, where we are redistributing:
2479 (inner1 AND inner2) OR (op2a code2 op2b)
2480 => (t AND (inner2 OR (op2a code op2b))) */
2481 else if (integer_zerop (t))
2482 return boolean_false_node;
2484 /* Save partial result for later. */
2485 partial = t;
2488 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2489 if (TREE_CODE (inner2) == SSA_NAME
2490 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2491 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2492 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2493 gimple_assign_rhs1 (s),
2494 gimple_assign_rhs2 (s),
2495 code2, op2a, op2b)))
2497 /* Handle the OR case, where we are reassociating:
2498 (inner1 OR inner2) OR (op2a code2 op2b)
2499 => (inner1 OR t)
2500 => (t OR partial) */
2501 if (is_or)
2503 if (integer_zerop (t))
2504 return inner1;
2505 else if (integer_onep (t))
2506 return boolean_true_node;
2507 /* If both are the same, we can apply the identity
2508 (x OR x) == x. */
2509 else if (partial && same_bool_result_p (t, partial))
2510 return t;
2513 /* Handle the AND case, where we are redistributing:
2514 (inner1 AND inner2) OR (op2a code2 op2b)
2515 => (t AND (inner1 OR (op2a code2 op2b)))
2516 => (t AND partial) */
2517 else
2519 if (integer_zerop (t))
2520 return boolean_false_node;
2521 else if (partial)
2523 /* We already got a simplification for the other
2524 operand to the redistributed AND expression. The
2525 interesting case is when at least one is true.
2526 Or, if both are the same, we can apply the identity
2527 (x AND x) == x. */
2528 if (integer_onep (partial))
2529 return t;
2530 else if (integer_onep (t))
2531 return partial;
2532 else if (same_bool_result_p (t, partial))
2533 return t;
2538 return NULL_TREE;
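/* A concrete instance of the reassociation above (hypothetical
   names): if var = inner1 OR inner2 with inner1 defined as
   x_1 < 7, then var OR (x_1 > 5) folds to true, because the first
   partial result (x_1 < 7) OR (x_1 > 5) covers the whole range.  */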
2541 /* Try to simplify the OR of two comparisons defined by
2542 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2543 If this can be done without constructing an intermediate value,
2544 return the resulting tree; otherwise NULL_TREE is returned.
2545 This function is deliberately asymmetric as it recurses on SSA_DEFs
2546 in the first comparison but not the second. */
2548 static tree
2549 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2550 enum tree_code code2, tree op2a, tree op2b)
2552 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2553 if (operand_equal_p (op1a, op2a, 0)
2554 && operand_equal_p (op1b, op2b, 0))
2556 tree t = combine_comparisons (UNKNOWN_LOCATION,
2557 TRUTH_ORIF_EXPR, code1, code2,
2558 boolean_type_node, op1a, op1b);
2559 if (t)
2560 return t;
2563 /* Likewise the swapped case of the above. */
2564 if (operand_equal_p (op1a, op2b, 0)
2565 && operand_equal_p (op1b, op2a, 0))
2567 tree t = combine_comparisons (UNKNOWN_LOCATION,
2568 TRUTH_ORIF_EXPR, code1,
2569 swap_tree_comparison (code2),
2570 boolean_type_node, op1a, op1b);
2571 if (t)
2572 return t;
2575 /* If both comparisons are of the same value against constants, we might
2576 be able to merge them. */
2577 if (operand_equal_p (op1a, op2a, 0)
2578 && TREE_CODE (op1b) == INTEGER_CST
2579 && TREE_CODE (op2b) == INTEGER_CST)
2581 int cmp = tree_int_cst_compare (op1b, op2b);
2583 /* If we have (op1a != op1b), we should either be able to
2584 return that or TRUE, depending on whether the constant op1b
2585 also satisfies the other comparison against op2b. */
2586 if (code1 == NE_EXPR)
2588 bool done = true;
2589 bool val;
2590 switch (code2)
2592 case EQ_EXPR: val = (cmp == 0); break;
2593 case NE_EXPR: val = (cmp != 0); break;
2594 case LT_EXPR: val = (cmp < 0); break;
2595 case GT_EXPR: val = (cmp > 0); break;
2596 case LE_EXPR: val = (cmp <= 0); break;
2597 case GE_EXPR: val = (cmp >= 0); break;
2598 default: done = false;
2600 if (done)
2602 if (val)
2603 return boolean_true_node;
2604 else
2605 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2608 /* Likewise if the second comparison is a != comparison. */
2609 else if (code2 == NE_EXPR)
2611 bool done = true;
2612 bool val;
2613 switch (code1)
2615 case EQ_EXPR: val = (cmp == 0); break;
2616 case NE_EXPR: val = (cmp != 0); break;
2617 case LT_EXPR: val = (cmp > 0); break;
2618 case GT_EXPR: val = (cmp < 0); break;
2619 case LE_EXPR: val = (cmp >= 0); break;
2620 case GE_EXPR: val = (cmp <= 0); break;
2621 default: done = false;
2623 if (done)
2625 if (val)
2626 return boolean_true_node;
2627 else
2628 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2632 /* See if an equality test is redundant with the other comparison. */
2633 else if (code1 == EQ_EXPR)
2635 bool val;
2636 switch (code2)
2638 case EQ_EXPR: val = (cmp == 0); break;
2639 case NE_EXPR: val = (cmp != 0); break;
2640 case LT_EXPR: val = (cmp < 0); break;
2641 case GT_EXPR: val = (cmp > 0); break;
2642 case LE_EXPR: val = (cmp <= 0); break;
2643 case GE_EXPR: val = (cmp >= 0); break;
2644 default:
2645 val = false;
2647 if (val)
2648 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2650 else if (code2 == EQ_EXPR)
2652 bool val;
2653 switch (code1)
2655 case EQ_EXPR: val = (cmp == 0); break;
2656 case NE_EXPR: val = (cmp != 0); break;
2657 case LT_EXPR: val = (cmp > 0); break;
2658 case GT_EXPR: val = (cmp < 0); break;
2659 case LE_EXPR: val = (cmp >= 0); break;
2660 case GE_EXPR: val = (cmp <= 0); break;
2661 default:
2662 val = false;
2664 if (val)
2665 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2668 /* Choose the less restrictive of two < or <= comparisons. */
2669 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2670 && (code2 == LT_EXPR || code2 == LE_EXPR))
2672 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2673 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2674 else
2675 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2678 /* Likewise choose the less restrictive of two > or >= comparisons. */
2679 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2680 && (code2 == GT_EXPR || code2 == GE_EXPR))
2682 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2683 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2684 else
2685 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2688 /* Check for singleton ranges. */
2689 else if (cmp == 0
2690 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2691 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2692 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2694 /* Check for less/greater pairs that don't restrict the range at all. */
2695 else if (cmp >= 0
2696 && (code1 == LT_EXPR || code1 == LE_EXPR)
2697 && (code2 == GT_EXPR || code2 == GE_EXPR))
2698 return boolean_true_node;
2699 else if (cmp <= 0
2700 && (code1 == GT_EXPR || code1 == GE_EXPR)
2701 && (code2 == LT_EXPR || code2 == LE_EXPR))
2702 return boolean_true_node;
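/* Worked examples of the constant merging above, with x an
   integer SSA name:
     (x != 5) OR (x < 7)   => true      (5 also satisfies x < 7)
     (x < 5)  OR (x <= 7)  => x <= 7    (less restrictive bound)
     (x < 5)  OR (x > 5)   => x != 5    (singleton gap)
     (x < 7)  OR (x > 5)   => true      (the union is everything)  */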
2705 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2706 NAME's definition is a truth value. See if there are any simplifications
2707 that can be done against the NAME's definition. */
2708 if (TREE_CODE (op1a) == SSA_NAME
2709 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2710 && (integer_zerop (op1b) || integer_onep (op1b)))
2712 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2713 || (code1 == NE_EXPR && integer_onep (op1b)));
2714 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2715 switch (gimple_code (stmt))
2717 case GIMPLE_ASSIGN:
2718 /* Try to simplify by copy-propagating the definition. */
2719 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2721 case GIMPLE_PHI:
2722 /* If every argument to the PHI produces the same result when
2723 ORed with the second comparison, we win.
2724 Do not do this unless the type is bool since we need a bool
2725 result here anyway. */
2726 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2728 tree result = NULL_TREE;
2729 unsigned i;
2730 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2732 tree arg = gimple_phi_arg_def (stmt, i);
2734 /* If this PHI has itself as an argument, ignore it.
2735 If all the other args produce the same result,
2736 we're still OK. */
2737 if (arg == gimple_phi_result (stmt))
2738 continue;
2739 else if (TREE_CODE (arg) == INTEGER_CST)
2741 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2743 if (!result)
2744 result = boolean_true_node;
2745 else if (!integer_onep (result))
2746 return NULL_TREE;
2748 else if (!result)
2749 result = fold_build2 (code2, boolean_type_node,
2750 op2a, op2b);
2751 else if (!same_bool_comparison_p (result,
2752 code2, op2a, op2b))
2753 return NULL_TREE;
2755 else if (TREE_CODE (arg) == SSA_NAME
2756 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2758 tree temp;
2759 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2760 /* In simple cases we can look through PHI nodes,
2761 but we have to be careful with loops.
2762 See PR49073. */
2763 if (! dom_info_available_p (CDI_DOMINATORS)
2764 || gimple_bb (def_stmt) == gimple_bb (stmt)
2765 || dominated_by_p (CDI_DOMINATORS,
2766 gimple_bb (def_stmt),
2767 gimple_bb (stmt)))
2768 return NULL_TREE;
2769 temp = or_var_with_comparison (arg, invert, code2,
2770 op2a, op2b);
2771 if (!temp)
2772 return NULL_TREE;
2773 else if (!result)
2774 result = temp;
2775 else if (!same_bool_result_p (result, temp))
2776 return NULL_TREE;
2778 else
2779 return NULL_TREE;
2781 return result;
2784 default:
2785 break;
2788 return NULL_TREE;
2791 /* Try to simplify the OR of two comparisons, specified by
2792 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2793 If this can be simplified to a single expression (without needing
2794 to introduce more SSA variables to hold intermediate values),
2795 return the resulting tree. Otherwise return NULL_TREE.
2796 If the result expression is non-null, it has boolean type. */
2798 tree
2799 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2800 enum tree_code code2, tree op2a, tree op2b)
2802 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2803 if (t)
2804 return t;
2805 else
2806 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2810 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2812 Either NULL_TREE, a simplified but non-constant value, or a constant
2813 is returned.
2815 ??? This should go into a gimple-fold-inline.h file to be eventually
2816 privatized with the single valueize function used in the various TUs
2817 to avoid the indirect function call overhead. */
2819 tree
2820 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2822 location_t loc = gimple_location (stmt);
2823 switch (gimple_code (stmt))
2825 case GIMPLE_ASSIGN:
2827 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2829 switch (get_gimple_rhs_class (subcode))
2831 case GIMPLE_SINGLE_RHS:
2833 tree rhs = gimple_assign_rhs1 (stmt);
2834 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2836 if (TREE_CODE (rhs) == SSA_NAME)
2838 /* If the RHS is an SSA_NAME, return its known constant value,
2839 if any. */
2840 return (*valueize) (rhs);
2842 /* Handle propagating invariant addresses into address
2843 operations. */
2844 else if (TREE_CODE (rhs) == ADDR_EXPR
2845 && !is_gimple_min_invariant (rhs))
2847 HOST_WIDE_INT offset;
2848 tree base;
2849 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2850 &offset,
2851 valueize);
2852 if (base
2853 && (CONSTANT_CLASS_P (base)
2854 || decl_address_invariant_p (base)))
2855 return build_invariant_address (TREE_TYPE (rhs),
2856 base, offset);
2858 else if (TREE_CODE (rhs) == CONSTRUCTOR
2859 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2860 && (CONSTRUCTOR_NELTS (rhs)
2861 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2863 unsigned i;
2864 tree val, list;
2866 list = NULL_TREE;
2867 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2869 val = (*valueize) (val);
2870 if (TREE_CODE (val) == INTEGER_CST
2871 || TREE_CODE (val) == REAL_CST
2872 || TREE_CODE (val) == FIXED_CST)
2873 list = tree_cons (NULL_TREE, val, list);
2874 else
2875 return NULL_TREE;
2878 return build_vector (TREE_TYPE (rhs), nreverse (list));
2881 if (kind == tcc_reference)
2883 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2884 || TREE_CODE (rhs) == REALPART_EXPR
2885 || TREE_CODE (rhs) == IMAGPART_EXPR)
2886 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2888 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2889 return fold_unary_loc (EXPR_LOCATION (rhs),
2890 TREE_CODE (rhs),
2891 TREE_TYPE (rhs), val);
2893 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2894 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2896 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2897 return fold_ternary_loc (EXPR_LOCATION (rhs),
2898 TREE_CODE (rhs),
2899 TREE_TYPE (rhs), val,
2900 TREE_OPERAND (rhs, 1),
2901 TREE_OPERAND (rhs, 2));
2903 else if (TREE_CODE (rhs) == MEM_REF
2904 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2906 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2907 if (TREE_CODE (val) == ADDR_EXPR
2908 && is_gimple_min_invariant (val))
2910 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2911 unshare_expr (val),
2912 TREE_OPERAND (rhs, 1));
2913 if (tem)
2914 rhs = tem;
2917 return fold_const_aggregate_ref_1 (rhs, valueize);
2919 else if (kind == tcc_declaration)
2920 return get_symbol_constant_value (rhs);
2921 return rhs;
2924 case GIMPLE_UNARY_RHS:
2926 /* Handle unary operators that can appear in GIMPLE form.
2927 Note that we know the single operand must be a constant,
2928 so this should almost always return a simplified RHS. */
2929 tree lhs = gimple_assign_lhs (stmt);
2930 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2932 /* Conversions are useless for CCP purposes if they are
2933 value-preserving. Thus the restrictions that
2934 useless_type_conversion_p places for pointer type conversions
2935 do not apply here. Substitution later will only substitute to
2936 allowed places. */
2937 if (CONVERT_EXPR_CODE_P (subcode)
2938 && POINTER_TYPE_P (TREE_TYPE (lhs))
2939 && POINTER_TYPE_P (TREE_TYPE (op0)))
2941 tree tem;
2942 /* Try to re-construct array references on-the-fly. */
2943 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2944 TREE_TYPE (op0))
2945 && ((tem = maybe_fold_offset_to_address
2946 (loc,
2947 op0, integer_zero_node, TREE_TYPE (lhs)))
2948 != NULL_TREE))
2949 return tem;
2950 return op0;
2953 return
2954 fold_unary_ignore_overflow_loc (loc, subcode,
2955 gimple_expr_type (stmt), op0);
2958 case GIMPLE_BINARY_RHS:
2960 /* Handle binary operators that can appear in GIMPLE form. */
2961 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2962 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2964 /* Translate &x + CST into an invariant form suitable for
2965 further propagation. */
2966 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2967 && TREE_CODE (op0) == ADDR_EXPR
2968 && TREE_CODE (op1) == INTEGER_CST)
2970 tree off = fold_convert (ptr_type_node, op1);
2971 return build_fold_addr_expr
2972 (fold_build2 (MEM_REF,
2973 TREE_TYPE (TREE_TYPE (op0)),
2974 unshare_expr (op0), off));
2977 return fold_binary_loc (loc, subcode,
2978 gimple_expr_type (stmt), op0, op1);
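/* E.g. (hypothetical names) for p_1 = &a + 4 with a an array, the
   POINTER_PLUS case above builds &MEM[&a + 4], which satisfies
   is_gimple_min_invariant and so can be propagated further.  */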
2981 case GIMPLE_TERNARY_RHS:
2983 /* Handle ternary operators that can appear in GIMPLE form. */
2984 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2985 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2986 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2988 return fold_ternary_loc (loc, subcode,
2989 gimple_expr_type (stmt), op0, op1, op2);
2992 default:
2993 gcc_unreachable ();
2997 case GIMPLE_CALL:
2999 tree fn;
3001 if (gimple_call_internal_p (stmt))
3002 /* No folding yet for these functions. */
3003 return NULL_TREE;
3005 fn = (*valueize) (gimple_call_fn (stmt));
3006 if (TREE_CODE (fn) == ADDR_EXPR
3007 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3008 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
3010 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
3011 tree call, retval;
3012 unsigned i;
3013 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3014 args[i] = (*valueize) (gimple_call_arg (stmt, i));
3015 call = build_call_array_loc (loc,
3016 gimple_call_return_type (stmt),
3017 fn, gimple_call_num_args (stmt), args);
3018 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
3019 if (retval)
3020 /* fold_call_expr wraps the result inside a NOP_EXPR. */
3021 STRIP_NOPS (retval);
3022 return retval;
3024 return NULL_TREE;
3027 default:
3028 return NULL_TREE;
3032 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
3033 Returns NULL_TREE if folding to a constant is not possible, otherwise
3034 returns a constant according to is_gimple_min_invariant. */
3036 tree
3037 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
3039 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
3040 if (res && is_gimple_min_invariant (res))
3041 return res;
3042 return NULL_TREE;
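/* For instance, a propagation pass supplies its lattice lookup as
   VALUEIZE; for x_1 = y_2 + 1 (hypothetical names) with y_2 known
   to be 41, this returns the INTEGER_CST 42, while a fold to a
   non-invariant simplification is filtered out here.  */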
3046 /* The following set of functions folds references using
3047 their constant initializers. */
3049 static tree fold_ctor_reference (tree type, tree ctor,
3050 unsigned HOST_WIDE_INT offset,
3051 unsigned HOST_WIDE_INT size);
3053 /* See if we can find the constructor defining the value of BASE.
3054 When we know the constructor at a constant offset (such as when
3055 BASE is array[40] and we know the constructor of array), then
3056 BIT_OFFSET is adjusted accordingly.
3058 As a special case, return error_mark_node when the constructor
3059 is not explicitly available, but it is known to be zero,
3060 such as 'static const int a;'. */
3061 static tree
3062 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
3063 tree (*valueize)(tree))
3065 HOST_WIDE_INT bit_offset2, size, max_size;
3066 if (TREE_CODE (base) == MEM_REF)
3068 if (!integer_zerop (TREE_OPERAND (base, 1)))
3070 if (!host_integerp (TREE_OPERAND (base, 1), 0))
3071 return NULL_TREE;
3072 *bit_offset += (mem_ref_offset (base).low
3073 * BITS_PER_UNIT);
3076 if (valueize
3077 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
3078 base = valueize (TREE_OPERAND (base, 0));
3079 if (!base || TREE_CODE (base) != ADDR_EXPR)
3080 return NULL_TREE;
3081 base = TREE_OPERAND (base, 0);
3084 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
3085 DECL_INITIAL. If BASE is a nested reference into another
3086 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
3087 the inner reference. */
3088 switch (TREE_CODE (base))
3090 case VAR_DECL:
3091 if (!const_value_known_p (base))
3092 return NULL_TREE;
3094 /* Fallthru. */
3095 case CONST_DECL:
3096 if (!DECL_INITIAL (base)
3097 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
3098 return error_mark_node;
3099 return DECL_INITIAL (base);
3101 case ARRAY_REF:
3102 case COMPONENT_REF:
3103 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
3104 if (max_size == -1 || size != max_size)
3105 return NULL_TREE;
3106 *bit_offset += bit_offset2;
3107 return get_base_constructor (base, bit_offset, valueize);
3109 case STRING_CST:
3110 case CONSTRUCTOR:
3111 return base;
3113 default:
3114 return NULL_TREE;
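/* E.g. for BASE being MEM[&a + 8] where a is a declared array with
   a known initializer, the MEM_REF case adds 8 * BITS_PER_UNIT to
   *BIT_OFFSET, strips the address, and the VAR_DECL case then
   returns a's DECL_INITIAL constructor.  */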
3118 /* CTOR is a STRING_CST. Fold a reference of type TYPE and size SIZE
3119 to the memory at bit OFFSET.
3121 We only do the simple job of folding byte accesses. */
3123 static tree
3124 fold_string_cst_ctor_reference (tree type, tree ctor,
3125 unsigned HOST_WIDE_INT offset,
3126 unsigned HOST_WIDE_INT size)
3128 if (INTEGRAL_TYPE_P (type)
3129 && (TYPE_MODE (type)
3130 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
3131 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
3132 == MODE_INT)
3133 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
3134 && size == BITS_PER_UNIT
3135 && !(offset % BITS_PER_UNIT))
3137 offset /= BITS_PER_UNIT;
3138 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
3139 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
3140 [offset]));
3141 /* Folding
3142 const char a[20]="hello";
3143 return a[10];
3145 might lead to offset greater than string length. In this case we
3146 know value is either initialized to 0 or out of bounds. Return 0
3147 in both cases. */
3148 return build_zero_cst (type);
3150 return NULL_TREE;
3153 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
3154 SIZE to the memory at bit OFFSET. */
3156 static tree
3157 fold_array_ctor_reference (tree type, tree ctor,
3158 unsigned HOST_WIDE_INT offset,
3159 unsigned HOST_WIDE_INT size)
3161 unsigned HOST_WIDE_INT cnt;
3162 tree cfield, cval;
3163 double_int low_bound, elt_size;
3164 double_int index, max_index;
3165 double_int access_index;
3166 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
3167 HOST_WIDE_INT inner_offset;
3169 /* Compute low bound and elt size. */
3170 if (domain_type && TYPE_MIN_VALUE (domain_type))
3172 /* Static constructors for variably sized objects make no sense. */
3173 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
3174 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
3176 else
3177 low_bound = double_int_zero;
3178 /* Static constructors for variably sized objects make no sense. */
3179 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
3180 == INTEGER_CST);
3181 elt_size =
3182 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
3185 /* We can handle only constant-size accesses that are known not to
3186 be larger than the size of the array element. */
3187 if (!TYPE_SIZE_UNIT (type)
3188 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
3189 || double_int_cmp (elt_size,
3190 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
3191 return NULL_TREE;
3193 /* Compute the array index we look for. */
3194 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
3195 elt_size, TRUNC_DIV_EXPR);
3196 access_index = double_int_add (access_index, low_bound);
3198 /* And offset within the access. */
3199 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
3201 /* See if the array element is large enough to span the whole access. We
3202 do not care to fold accesses spanning multiple array indexes. */
3203 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
3204 return NULL_TREE;
3206 index = double_int_sub (low_bound, double_int_one);
3207 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
3209 /* An array constructor might explicitly set the index, specify a range,
3210 or leave the index NULL, meaning that it is the next index after the
3211 previous one. */
3212 if (cfield)
3214 if (TREE_CODE (cfield) == INTEGER_CST)
3215 max_index = index = tree_to_double_int (cfield);
3216 else
3218 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
3219 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
3220 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
3223 else
3224 max_index = index = double_int_add (index, double_int_one);
3226 /* Do we have a match? */
3227 if (double_int_cmp (access_index, index, 1) >= 0
3228 && double_int_cmp (access_index, max_index, 1) <= 0)
3229 return fold_ctor_reference (type, cval, inner_offset, size);
3231 /* When memory is not explicitly mentioned in the constructor,
3232 it is 0 (or out of range). */
3233 return build_zero_cst (type);
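/* As a worked example: reading 32 bits at bit OFFSET 64 from the
   constructor of
     static const int a[4] = { 1, 2, 3, 4 };
   gives access_index = 8 / 4 + 0 = 2 and inner_offset = 0, so the
   element with index 2, the INTEGER_CST 3, is returned (assuming
   4-byte int and 8-bit units).  */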
3236 /* CTOR is CONSTRUCTOR of an aggregate or vector.
3237 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
3239 static tree
3240 fold_nonarray_ctor_reference (tree type, tree ctor,
3241 unsigned HOST_WIDE_INT offset,
3242 unsigned HOST_WIDE_INT size)
3244 unsigned HOST_WIDE_INT cnt;
3245 tree cfield, cval;
3247 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
3248 cval)
3250 tree byte_offset = DECL_FIELD_OFFSET (cfield);
3251 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
3252 tree field_size = DECL_SIZE (cfield);
3253 double_int bitoffset;
3254 double_int byte_offset_cst = tree_to_double_int (byte_offset);
3255 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
3256 double_int bitoffset_end;
3258 /* Variable sized objects in static constructors make no sense,
3259 but field_size can be NULL for flexible array members. */
3260 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
3261 && TREE_CODE (byte_offset) == INTEGER_CST
3262 && (field_size != NULL_TREE
3263 ? TREE_CODE (field_size) == INTEGER_CST
3264 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
3266 /* Compute bit offset of the field. */
3267 bitoffset = double_int_add (tree_to_double_int (field_offset),
3268 double_int_mul (byte_offset_cst,
3269 bits_per_unit_cst));
3270 /* Compute bit offset where the field ends. */
3271 if (field_size != NULL_TREE)
3272 bitoffset_end = double_int_add (bitoffset,
3273 tree_to_double_int (field_size));
3274 else
3275 bitoffset_end = double_int_zero;
3277 /* Is OFFSET in the range (BITOFFSET, BITOFFSET_END)? */
3278 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) >= 0
3279 && (field_size == NULL_TREE
3280 || double_int_cmp (uhwi_to_double_int (offset),
3281 bitoffset_end, 0) < 0))
3283 double_int access_end = double_int_add (uhwi_to_double_int (offset),
3284 uhwi_to_double_int (size));
3285 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
3286 bitoffset);
3287 /* We do have overlap. Now see if field is large enough to
3288 cover the access. Give up for accesses spanning multiple
3289 fields. */
3290 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
3291 return NULL_TREE;
3292 return fold_ctor_reference (type, cval,
3293 double_int_to_uhwi (inner_offset), size);
3296 /* When memory is not explicitly mentioned in the constructor, it is 0. */
3297 return build_zero_cst (type);
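/* As a worked example: reading 32 bits at bit OFFSET 32 from the
   constructor of
     static const struct { int a; int b; } s = { 1, 2 };
   finds field b with bitoffset 32 and bitoffset_end 64, computes
   inner_offset 0, and folds to the INTEGER_CST 2 (assuming 4-byte
   int).  */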
3300 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
3301 to the memory at bit OFFSET. */
3303 static tree
3304 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
3305 unsigned HOST_WIDE_INT size)
3307 tree ret;
3309 /* We found a field with an exact match. */
3310 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
3311 && !offset)
3312 return canonicalize_constructor_val (ctor);
3314 /* We are at the end of the walk; see if we can view-convert the
3315 result. */
3316 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
3317 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3318 && operand_equal_p (TYPE_SIZE (type),
3319 TYPE_SIZE (TREE_TYPE (ctor)), 0))
3321 ret = canonicalize_constructor_val (ctor);
3322 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
3323 if (ret)
3324 STRIP_NOPS (ret);
3325 return ret;
3327 if (TREE_CODE (ctor) == STRING_CST)
3328 return fold_string_cst_ctor_reference (type, ctor, offset, size);
3329 if (TREE_CODE (ctor) == CONSTRUCTOR)
3332 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
3333 return fold_array_ctor_reference (type, ctor, offset, size);
3334 else
3335 return fold_nonarray_ctor_reference (type, ctor, offset, size);
3338 return NULL_TREE;
3341 /* Return the tree representing the element referenced by T if T is an
3342 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
3343 names using VALUEIZE. Return NULL_TREE otherwise. */
3345 tree
3346 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
3348 tree ctor, idx, base;
3349 HOST_WIDE_INT offset, size, max_size;
3350 tree tem;
3352 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3353 return get_symbol_constant_value (t);
3355 tem = fold_read_from_constant_string (t);
3356 if (tem)
3357 return tem;
3359 switch (TREE_CODE (t))
3361 case ARRAY_REF:
3362 case ARRAY_RANGE_REF:
3363 /* Constant indexes are handled well by get_base_constructor.
3364 Only special case variable offsets.
3365 FIXME: This code can't handle nested references with variable indexes
3366 (they will be handled only by iteration of ccp). Perhaps we can bring
3367 get_ref_base_and_extent here and make it use a valueize callback. */
3368 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3369 && valueize
3370 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3371 && host_integerp (idx, 0))
3373 tree low_bound, unit_size;
3375 /* If the resulting bit-offset is constant, track it. */
3376 if ((low_bound = array_ref_low_bound (t),
3377 host_integerp (low_bound, 0))
3378 && (unit_size = array_ref_element_size (t),
3379 host_integerp (unit_size, 1)))
3381 offset = TREE_INT_CST_LOW (idx);
3382 offset -= TREE_INT_CST_LOW (low_bound);
3383 offset *= TREE_INT_CST_LOW (unit_size);
3384 offset *= BITS_PER_UNIT;
3386 base = TREE_OPERAND (t, 0);
3387 ctor = get_base_constructor (base, &offset, valueize);
3388 /* Empty constructor. Always fold to 0. */
3389 if (ctor == error_mark_node)
3390 return build_zero_cst (TREE_TYPE (t));
3391 /* Out-of-bounds array access. The value is undefined,
3392 but don't fold. */
3393 if (offset < 0)
3394 return NULL_TREE;
3395 /* We cannot determine the ctor. */
3396 if (!ctor)
3397 return NULL_TREE;
3398 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3399 TREE_INT_CST_LOW (unit_size)
3400 * BITS_PER_UNIT);
3403 /* Fallthru. */
3405 case COMPONENT_REF:
3406 case BIT_FIELD_REF:
3407 case TARGET_MEM_REF:
3408 case MEM_REF:
3409 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3410 ctor = get_base_constructor (base, &offset, valueize);
3412 /* Empty constructor. Always fold to 0. */
3413 if (ctor == error_mark_node)
3414 return build_zero_cst (TREE_TYPE (t));
3415 /* We do not know the precise address. */
3416 if (max_size == -1 || max_size != size)
3417 return NULL_TREE;
3418 /* We cannot determine the ctor. */
3419 if (!ctor)
3420 return NULL_TREE;
3422 /* Out-of-bounds array access. The value is undefined, but don't fold. */
3423 if (offset < 0)
3424 return NULL_TREE;
3426 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
3428 case REALPART_EXPR:
3429 case IMAGPART_EXPR:
3431 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3432 if (c && TREE_CODE (c) == COMPLEX_CST)
3433 return fold_build1_loc (EXPR_LOCATION (t),
3434 TREE_CODE (t), TREE_TYPE (t), c);
3435 break;
3438 default:
3439 break;
3442 return NULL_TREE;
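/* E.g. for a[i_2] (hypothetical names) where VALUEIZE maps i_2 to 3,
   the ARRAY_REF case computes offset = (3 - low bound) * element
   size in bits and hands a's constructor to fold_ctor_reference,
   which picks out the fourth element.  */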
3445 tree
3446 fold_const_aggregate_ref (tree t)
3448 return fold_const_aggregate_ref_1 (t, NULL);
3451 /* Return true iff VAL is a gimple expression that is known to be
3452 non-negative. Restricted to floating-point inputs. */
3454 bool
3455 gimple_val_nonnegative_real_p (tree val)
3457 gimple def_stmt;
3459 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3461 /* Use existing logic for non-gimple trees. */
3462 if (tree_expr_nonnegative_p (val))
3463 return true;
3465 if (TREE_CODE (val) != SSA_NAME)
3466 return false;
3468 /* Currently we look only at the immediately defining statement
3469 to make this determination, since recursion on defining
3470 statements of operands can lead to quadratic behavior in the
3471 worst case. This is expected to catch almost all occurrences
3472 in practice. It would be possible to implement limited-depth
3473 recursion if important cases are lost. Alternatively, passes
3474 that need this information (such as the pow/powi lowering code
3475 in the cse_sincos pass) could be revised to provide it through
3476 dataflow propagation. */
3478 def_stmt = SSA_NAME_DEF_STMT (val);
3480 if (is_gimple_assign (def_stmt))
3482 tree op0, op1;
3484 /* See fold-const.c:tree_expr_nonnegative_p for additional
3485 cases that could be handled with recursion. */
3487 switch (gimple_assign_rhs_code (def_stmt))
3489 case ABS_EXPR:
3490 /* Always true for floating-point operands. */
3491 return true;
3493 case MULT_EXPR:
3494 /* True if the two operands are identical (since we are
3495 restricted to floating-point inputs). */
3496 op0 = gimple_assign_rhs1 (def_stmt);
3497 op1 = gimple_assign_rhs2 (def_stmt);
3499 if (op0 == op1
3500 || operand_equal_p (op0, op1, 0))
3501 return true;
3503 default:
3504 return false;
3507 else if (is_gimple_call (def_stmt))
3509 tree fndecl = gimple_call_fndecl (def_stmt);
3510 if (fndecl
3511 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3513 tree arg1;
3515 switch (DECL_FUNCTION_CODE (fndecl))
3517 CASE_FLT_FN (BUILT_IN_ACOS):
3518 CASE_FLT_FN (BUILT_IN_ACOSH):
3519 CASE_FLT_FN (BUILT_IN_CABS):
3520 CASE_FLT_FN (BUILT_IN_COSH):
3521 CASE_FLT_FN (BUILT_IN_ERFC):
3522 CASE_FLT_FN (BUILT_IN_EXP):
3523 CASE_FLT_FN (BUILT_IN_EXP10):
3524 CASE_FLT_FN (BUILT_IN_EXP2):
3525 CASE_FLT_FN (BUILT_IN_FABS):
3526 CASE_FLT_FN (BUILT_IN_FDIM):
3527 CASE_FLT_FN (BUILT_IN_HYPOT):
3528 CASE_FLT_FN (BUILT_IN_POW10):
3529 return true;
3531 CASE_FLT_FN (BUILT_IN_SQRT):
3532 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3533 negative inputs, so the result is negative only for -0.0. */
3534 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3535 return true;
3537 break;
3539 CASE_FLT_FN (BUILT_IN_POWI):
3540 /* True if the second argument is an even integer. */
3541 arg1 = gimple_call_arg (def_stmt, 1);
3543 if (TREE_CODE (arg1) == INTEGER_CST
3544 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3545 return true;
3547 break;
3549 CASE_FLT_FN (BUILT_IN_POW):
3550 /* True if the second argument is an even integer-valued
3551 real. */
3552 arg1 = gimple_call_arg (def_stmt, 1);
3554 if (TREE_CODE (arg1) == REAL_CST)
3556 REAL_VALUE_TYPE c;
3557 HOST_WIDE_INT n;
3559 c = TREE_REAL_CST (arg1);
3560 n = real_to_integer (&c);
3562 if ((n & 1) == 0)
3564 REAL_VALUE_TYPE cint;
3565 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3566 if (real_identical (&c, &cint))
3567 return true;
3571 break;
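/* E.g. powi (x, 4) and pow (x, 4.0) are recognized as nonnegative
   here, while pow (x, 4.5) is not, since 4.5 is not an even
   integer-valued real.  */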
3573 default:
3574 return false;
3579 return false;