gcc/gimple-fold.c
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010, 2011 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
32 #include "target.h"
33 #include "gimple-fold.h"
35 /* Return true when DECL can be referenced from current unit.
36 We can get declarations that are not possible to reference for
37 various reasons:
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
41 when they are keyed to another compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
44 set.
45 2) In WHOPR mode devirtualization might lead to a reference
46 to a method that was partitioned elsewhere.
47 In this case we have static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
49 declaring the body.
50 3) COMDAT functions referred to by external vtables that
51 we devirtualize only during the final compilation stage.
52 At this time we already decided that we will not output
53 the function body and thus we can't reference the symbol
54 directly. */
56 static bool
57 can_refer_decl_in_current_unit_p (tree decl)
59 struct varpool_node *vnode;
60 struct cgraph_node *node;
62 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
63 return true;
64 /* External flag is set, so we deal with a C++ reference
65 to a static object from another file. */
66 if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
67 && TREE_CODE (decl) == VAR_DECL)
69 /* Just be sure it is not a bug in the frontend setting
70 flags incorrectly. Those variables should never
71 be finalized. */
72 gcc_checking_assert (!(vnode = varpool_get_node (decl))
73 || !vnode->finalized);
74 return false;
76 /* When the function is public, we can always introduce a new reference.
77 The exception is COMDAT functions, where introducing a direct
78 reference implies the need to include the function body in the current unit. */
79 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
80 return true;
81 /* We are not at the ltrans stage, so don't worry about WHOPR.
82 Also, while still gimplifying, all referred comdat functions will be
83 produced.
84 ??? As observed in PR20991, for already optimized out comdat virtual functions
85 we may not necessarily give up because the copy will be output elsewhere when
86 the corresponding vtable is output. */
87 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
88 return true;
89 /* If we already output the function body, we are safe. */
90 if (TREE_ASM_WRITTEN (decl))
91 return true;
92 if (TREE_CODE (decl) == FUNCTION_DECL)
94 node = cgraph_get_node (decl);
95 /* Check that we still have the function body and that we didn't take
96 the decision to eliminate the offline copy of the function yet.
97 The second is important when devirtualization happens during the final
98 compilation stage, when making a new reference no longer causes the
99 callee to be compiled. */
100 if (!node || !node->analyzed || node->global.inlined_to)
101 return false;
103 else if (TREE_CODE (decl) == VAR_DECL)
105 vnode = varpool_get_node (decl);
106 if (!vnode || !vnode->finalized)
107 return false;
109 return true;
112 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
113 acceptable form for is_gimple_min_invariant. */
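/* For example, an initializer of the form &arr + 4 (a POINTER_PLUS_EXPR)
   can be rewritten as the equivalent &arr[1] when the element size is
   4 bytes (an illustrative assumption), a form that
   is_gimple_min_invariant accepts. */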
115 tree
116 canonicalize_constructor_val (tree cval)
118 STRIP_NOPS (cval);
119 if (TREE_CODE (cval) == POINTER_PLUS_EXPR)
121 tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval),
122 TREE_OPERAND (cval, 0),
123 TREE_OPERAND (cval, 1),
124 TREE_TYPE (cval));
125 if (t)
126 cval = t;
128 if (TREE_CODE (cval) == ADDR_EXPR)
130 tree base = get_base_address (TREE_OPERAND (cval, 0));
132 if (base
133 && (TREE_CODE (base) == VAR_DECL
134 || TREE_CODE (base) == FUNCTION_DECL)
135 && !can_refer_decl_in_current_unit_p (base))
136 return NULL_TREE;
137 if (cfun && base && TREE_CODE (base) == VAR_DECL)
138 add_referenced_var (base);
139 /* Fixup types in global initializers. */
140 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
141 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
143 return cval;
146 /* If SYM is a constant variable with known value, return the value.
147 NULL_TREE is returned otherwise. */
149 tree
150 get_symbol_constant_value (tree sym)
152 if (const_value_known_p (sym))
154 tree val = DECL_INITIAL (sym);
155 if (val)
157 val = canonicalize_constructor_val (val);
158 if (val && is_gimple_min_invariant (val))
159 return val;
160 else
161 return NULL_TREE;
163 /* Variables declared 'const' without an initializer
164 have zero as the initializer if they may not be
165 overridden at link or run time. */
166 if (!val
167 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
168 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
169 return build_zero_cst (TREE_TYPE (sym));
172 return NULL_TREE;
176 /* Return true if we may propagate the address expression ADDR into the
177 dereference DEREF and cancel them. */
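/* For example, propagating ADDR = &a into the dereference DEREF = *p
   lets the pair cancel to plain a, provided the checks below show the
   resulting access to be type-equivalent. */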
179 bool
180 may_propagate_address_into_dereference (tree addr, tree deref)
182 gcc_assert (TREE_CODE (deref) == MEM_REF
183 && TREE_CODE (addr) == ADDR_EXPR);
185 /* Don't propagate if ADDR's operand has incomplete type. */
186 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
187 return false;
189 /* If the address is invariant then we do not need to preserve restrict
190 qualifications. But we do need to preserve volatile qualifiers until
191 we can annotate the folded dereference itself properly. */
192 if (is_gimple_min_invariant (addr)
193 && (!TREE_THIS_VOLATILE (deref)
194 || TYPE_VOLATILE (TREE_TYPE (addr))))
195 return useless_type_conversion_p (TREE_TYPE (deref),
196 TREE_TYPE (TREE_OPERAND (addr, 0)));
198 /* Else both the address substitution and the folding must result in
199 a valid useless type conversion sequence. */
200 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
201 TREE_TYPE (addr))
202 && useless_type_conversion_p (TREE_TYPE (deref),
203 TREE_TYPE (TREE_OPERAND (addr, 0))));
207 /* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
208 BASE is an array type. OFFSET is a byte displacement.
210 LOC is the location of the original expression. */
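/* Illustrative example, assuming a 4-byte int: for int a[10], BASE = a
   with OFFSET = 8 folds to a[2], while OFFSET = 6 is rejected because
   the division by the element size is not exact. */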
212 static tree
213 maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
215 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
216 tree array_type, elt_type, elt_size;
217 tree domain_type;
219 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
220 measured in units of the size of the element type) from that ARRAY_REF.
221 We can't do anything if either is variable.
223 The case we handle here is *(&A[N]+O). */
224 if (TREE_CODE (base) == ARRAY_REF)
226 tree low_bound = array_ref_low_bound (base);
228 elt_offset = TREE_OPERAND (base, 1);
229 if (TREE_CODE (low_bound) != INTEGER_CST
230 || TREE_CODE (elt_offset) != INTEGER_CST)
231 return NULL_TREE;
233 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound);
234 base = TREE_OPERAND (base, 0);
237 /* Ignore stupid user tricks of indexing non-array variables. */
238 array_type = TREE_TYPE (base);
239 if (TREE_CODE (array_type) != ARRAY_TYPE)
240 return NULL_TREE;
241 elt_type = TREE_TYPE (array_type);
243 /* Use signed size type for intermediate computation on the index. */
244 idx_type = ssizetype;
246 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
247 element type (so we can use the alignment if it's not constant).
248 Otherwise, compute the offset as an index by using a division. If the
249 division isn't exact, then don't do anything. */
250 elt_size = TYPE_SIZE_UNIT (elt_type);
251 if (!elt_size)
252 return NULL;
253 if (integer_zerop (offset))
255 if (TREE_CODE (elt_size) != INTEGER_CST)
256 elt_size = size_int (TYPE_ALIGN (elt_type));
258 idx = build_int_cst (idx_type, 0);
260 else
262 unsigned HOST_WIDE_INT lquo, lrem;
263 HOST_WIDE_INT hquo, hrem;
264 double_int soffset;
266 /* The final array offset should be signed, so we need
267 to sign-extend the (possibly pointer) offset here
268 and use signed division. */
269 soffset = double_int_sext (tree_to_double_int (offset),
270 TYPE_PRECISION (TREE_TYPE (offset)));
271 if (TREE_CODE (elt_size) != INTEGER_CST
272 || div_and_round_double (TRUNC_DIV_EXPR, 0,
273 soffset.low, soffset.high,
274 TREE_INT_CST_LOW (elt_size),
275 TREE_INT_CST_HIGH (elt_size),
276 &lquo, &hquo, &lrem, &hrem)
277 || lrem || hrem)
278 return NULL_TREE;
280 idx = build_int_cst_wide (idx_type, lquo, hquo);
283 /* Assume the low bound is zero. If there is a domain type, get the
284 low bound, if any, convert the index into that type, and add the
285 low bound. */
286 min_idx = build_int_cst (idx_type, 0);
287 domain_type = TYPE_DOMAIN (array_type);
288 if (domain_type)
290 idx_type = domain_type;
291 if (TYPE_MIN_VALUE (idx_type))
292 min_idx = TYPE_MIN_VALUE (idx_type);
293 else
294 min_idx = fold_convert (idx_type, min_idx);
296 if (TREE_CODE (min_idx) != INTEGER_CST)
297 return NULL_TREE;
299 elt_offset = fold_convert (idx_type, elt_offset);
302 if (!integer_zerop (min_idx))
303 idx = int_const_binop (PLUS_EXPR, idx, min_idx);
304 if (!integer_zerop (elt_offset))
305 idx = int_const_binop (PLUS_EXPR, idx, elt_offset);
307 /* Make sure to possibly truncate late after offsetting. */
308 idx = fold_convert (idx_type, idx);
310 /* We don't want to construct an access past array bounds. For example
311 char *(c[4]);
312 c[3][2];
313 should not be simplified into (*c)[14] or tree-vrp will
314 give false warnings.
315 This is only an issue for multi-dimensional arrays. */
316 if (TREE_CODE (elt_type) == ARRAY_TYPE
317 && domain_type)
319 if (TYPE_MAX_VALUE (domain_type)
320 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
321 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
322 return NULL_TREE;
323 else if (TYPE_MIN_VALUE (domain_type)
324 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
325 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
326 return NULL_TREE;
327 else if (compare_tree_int (idx, 0) < 0)
328 return NULL_TREE;
332 tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
333 SET_EXPR_LOCATION (t, loc);
334 return t;
339 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
340 LOC is the location of the original expression.
342 Before attempting the conversion, strip off existing ADDR_EXPRs. */
344 tree
345 maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
346 tree orig_type)
348 tree ret;
350 STRIP_NOPS (base);
351 if (TREE_CODE (base) != ADDR_EXPR)
352 return NULL_TREE;
354 base = TREE_OPERAND (base, 0);
355 if (types_compatible_p (orig_type, TREE_TYPE (base))
356 && integer_zerop (offset))
357 return base;
359 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
360 if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
361 return ret;
362 return NULL_TREE;
365 /* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
366 LOC is the location of the original expression. */
368 tree
369 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
370 tree orig_type)
372 tree base, ret;
374 STRIP_NOPS (addr);
375 if (TREE_CODE (addr) != ADDR_EXPR)
376 return NULL_TREE;
377 base = TREE_OPERAND (addr, 0);
378 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
379 if (ret)
381 ret = build_fold_addr_expr (ret);
382 if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
383 return NULL_TREE;
384 SET_EXPR_LOCATION (ret, loc);
387 return ret;
391 /* A quaint feature extant in our address arithmetic is that there
392 can be hidden type changes here. The type of the result need
393 not be the same as the type of the input pointer.
395 What we're after here is an expression of the form
396 (T *)(&array + const)
397 where array is OP0, const is OP1, RES_TYPE is T and
398 the cast doesn't actually exist, but is implicit in the
399 type of the POINTER_PLUS_EXPR. We'd like to turn this into
400 &array[x]
401 which may be able to propagate further. */
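/* For instance, assuming a 4-byte int, for int a[10] the expression
   (int *) &a p+ 8 can become &a[2]. */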
403 tree
404 maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
406 tree ptd_type;
407 tree t;
409 /* The first operand should be an ADDR_EXPR. */
410 if (TREE_CODE (op0) != ADDR_EXPR)
411 return NULL_TREE;
412 op0 = TREE_OPERAND (op0, 0);
414 /* It had better be a constant. */
415 if (TREE_CODE (op1) != INTEGER_CST)
417 /* Or op0 should now be A[0] and the non-constant offset defined
418 via a multiplication by the array element size. */
419 if (TREE_CODE (op0) == ARRAY_REF
420 /* As we will end up creating a variable index array access
421 in the outermost array dimension, make sure there isn't
422 an inner array that the index could overflow into. */
423 && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
424 && integer_zerop (TREE_OPERAND (op0, 1))
425 && TREE_CODE (op1) == SSA_NAME)
427 gimple offset_def = SSA_NAME_DEF_STMT (op1);
428 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
429 if (!host_integerp (elsz, 1)
430 || !is_gimple_assign (offset_def))
431 return NULL_TREE;
433 /* Do not build array references of something that we can't
434 see the true number of array dimensions for. */
435 if (!DECL_P (TREE_OPERAND (op0, 0))
436 && !handled_component_p (TREE_OPERAND (op0, 0)))
437 return NULL_TREE;
439 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
440 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
441 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
442 return build_fold_addr_expr
443 (build4 (ARRAY_REF, TREE_TYPE (op0),
444 TREE_OPERAND (op0, 0),
445 gimple_assign_rhs1 (offset_def),
446 TREE_OPERAND (op0, 2),
447 TREE_OPERAND (op0, 3)));
448 else if (integer_onep (elsz)
449 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
450 return build_fold_addr_expr
451 (build4 (ARRAY_REF, TREE_TYPE (op0),
452 TREE_OPERAND (op0, 0),
453 op1,
454 TREE_OPERAND (op0, 2),
455 TREE_OPERAND (op0, 3)));
457 else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
458 /* Ditto. */
459 && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
460 && TREE_CODE (op1) == SSA_NAME)
462 gimple offset_def = SSA_NAME_DEF_STMT (op1);
463 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
464 if (!host_integerp (elsz, 1)
465 || !is_gimple_assign (offset_def))
466 return NULL_TREE;
468 /* Do not build array references of something that we can't
469 see the true number of array dimensions for. */
470 if (!DECL_P (op0)
471 && !handled_component_p (op0))
472 return NULL_TREE;
474 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
475 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
476 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
477 return build_fold_addr_expr
478 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
479 op0, gimple_assign_rhs1 (offset_def),
480 integer_zero_node, NULL_TREE));
481 else if (integer_onep (elsz)
482 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
483 return build_fold_addr_expr
484 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
485 op0, op1,
486 integer_zero_node, NULL_TREE));
489 return NULL_TREE;
492 /* If the first operand is an ARRAY_REF, expand it so that we can fold
493 the offset into it. */
494 while (TREE_CODE (op0) == ARRAY_REF)
496 tree array_obj = TREE_OPERAND (op0, 0);
497 tree array_idx = TREE_OPERAND (op0, 1);
498 tree elt_type = TREE_TYPE (op0);
499 tree elt_size = TYPE_SIZE_UNIT (elt_type);
500 tree min_idx;
502 if (TREE_CODE (array_idx) != INTEGER_CST)
503 break;
504 if (TREE_CODE (elt_size) != INTEGER_CST)
505 break;
507 /* Un-bias the index by the min index of the array type. */
508 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
509 if (min_idx)
511 min_idx = TYPE_MIN_VALUE (min_idx);
512 if (min_idx)
514 if (TREE_CODE (min_idx) != INTEGER_CST)
515 break;
517 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
518 if (!integer_zerop (min_idx))
519 array_idx = int_const_binop (MINUS_EXPR, array_idx,
520 min_idx);
524 /* Convert the index to a byte offset. */
525 array_idx = fold_convert (sizetype, array_idx);
526 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size);
528 /* Update the operands for the next round, or for folding. */
529 op1 = int_const_binop (PLUS_EXPR,
530 array_idx, op1);
531 op0 = array_obj;
534 ptd_type = TREE_TYPE (res_type);
535 /* If we want a pointer to void, reconstruct the reference from the
536 array element type. A pointer to that can be trivially converted
537 to void *. This happens as we fold (void *)(ptr p+ off). */
538 if (VOID_TYPE_P (ptd_type)
539 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
540 ptd_type = TREE_TYPE (TREE_TYPE (op0));
542 /* At which point we can try some of the same things as for indirects. */
543 t = maybe_fold_offset_to_array_ref (loc, op0, op1);
544 if (t)
546 t = build_fold_addr_expr (t);
547 if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
548 return NULL_TREE;
549 SET_EXPR_LOCATION (t, loc);
552 return t;
555 /* Subroutine of fold_stmt. We perform several simplifications of the
556 memory reference tree EXPR and make sure to re-gimplify them properly
557 after propagation of constant addresses. IS_LHS is true if the
558 reference is supposed to be an lvalue. */
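/* For example, once constant propagation has substituted &x for a
   pointer p, a dereference *p that became *&x folds back to plain x
   here. */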
560 static tree
561 maybe_fold_reference (tree expr, bool is_lhs)
563 tree *t = &expr;
564 tree result;
566 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
567 || TREE_CODE (expr) == REALPART_EXPR
568 || TREE_CODE (expr) == IMAGPART_EXPR)
569 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
570 return fold_unary_loc (EXPR_LOCATION (expr),
571 TREE_CODE (expr),
572 TREE_TYPE (expr),
573 TREE_OPERAND (expr, 0));
574 else if (TREE_CODE (expr) == BIT_FIELD_REF
575 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
576 return fold_ternary_loc (EXPR_LOCATION (expr),
577 TREE_CODE (expr),
578 TREE_TYPE (expr),
579 TREE_OPERAND (expr, 0),
580 TREE_OPERAND (expr, 1),
581 TREE_OPERAND (expr, 2));
583 while (handled_component_p (*t))
584 t = &TREE_OPERAND (*t, 0);
586 /* Canonicalize a MEM_REF's invariant address operand. Do this first
587 to avoid feeding non-canonical MEM_REFs elsewhere. */
588 if (TREE_CODE (*t) == MEM_REF
589 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
591 bool volatile_p = TREE_THIS_VOLATILE (*t);
592 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
593 TREE_OPERAND (*t, 0),
594 TREE_OPERAND (*t, 1));
595 if (tem)
597 TREE_THIS_VOLATILE (tem) = volatile_p;
598 *t = tem;
599 tem = maybe_fold_reference (expr, is_lhs);
600 if (tem)
601 return tem;
602 return expr;
606 if (!is_lhs
607 && (result = fold_const_aggregate_ref (expr))
608 && is_gimple_min_invariant (result))
609 return result;
611 /* Fold back MEM_REFs to reference trees. */
612 if (TREE_CODE (*t) == MEM_REF
613 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
614 && integer_zerop (TREE_OPERAND (*t, 1))
615 && (TREE_THIS_VOLATILE (*t)
616 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
617 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
618 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
619 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
620 /* We have to look out here to not drop a required conversion
621 from the rhs to the lhs if is_lhs, but we don't have the
622 rhs here to verify that. Thus require strict type
623 compatibility. */
624 && types_compatible_p (TREE_TYPE (*t),
625 TREE_TYPE (TREE_OPERAND
626 (TREE_OPERAND (*t, 0), 0))))
628 tree tem;
629 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
630 tem = maybe_fold_reference (expr, is_lhs);
631 if (tem)
632 return tem;
633 return expr;
635 else if (TREE_CODE (*t) == TARGET_MEM_REF)
637 tree tem = maybe_fold_tmr (*t);
638 if (tem)
640 *t = tem;
641 tem = maybe_fold_reference (expr, is_lhs);
642 if (tem)
643 return tem;
644 return expr;
648 return NULL_TREE;
652 /* Attempt to fold an assignment statement pointed to by SI. Returns a
653 replacement rhs for the statement or NULL_TREE if no simplification
654 could be made. It is assumed that the operands have been previously
655 folded. */
657 static tree
658 fold_gimple_assign (gimple_stmt_iterator *si)
660 gimple stmt = gsi_stmt (*si);
661 enum tree_code subcode = gimple_assign_rhs_code (stmt);
662 location_t loc = gimple_location (stmt);
664 tree result = NULL_TREE;
666 switch (get_gimple_rhs_class (subcode))
668 case GIMPLE_SINGLE_RHS:
670 tree rhs = gimple_assign_rhs1 (stmt);
672 /* Try to fold a conditional expression. */
673 if (TREE_CODE (rhs) == COND_EXPR)
675 tree op0 = COND_EXPR_COND (rhs);
676 tree tem;
677 bool set = false;
678 location_t cond_loc = EXPR_LOCATION (rhs);
680 if (COMPARISON_CLASS_P (op0))
682 fold_defer_overflow_warnings ();
683 tem = fold_binary_loc (cond_loc,
684 TREE_CODE (op0), TREE_TYPE (op0),
685 TREE_OPERAND (op0, 0),
686 TREE_OPERAND (op0, 1));
687 /* This is actually a conditional expression, not a GIMPLE
688 conditional statement, however, the valid_gimple_rhs_p
689 test still applies. */
690 set = (tem && is_gimple_condexpr (tem)
691 && valid_gimple_rhs_p (tem));
692 fold_undefer_overflow_warnings (set, stmt, 0);
694 else if (is_gimple_min_invariant (op0))
696 tem = op0;
697 set = true;
699 else
700 return NULL_TREE;
702 if (set)
703 result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
704 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
707 else if (REFERENCE_CLASS_P (rhs))
708 return maybe_fold_reference (rhs, false);
710 else if (TREE_CODE (rhs) == ADDR_EXPR)
712 tree ref = TREE_OPERAND (rhs, 0);
713 tree tem = maybe_fold_reference (ref, true);
714 if (tem
715 && TREE_CODE (tem) == MEM_REF
716 && integer_zerop (TREE_OPERAND (tem, 1)))
717 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
718 else if (tem)
719 result = fold_convert (TREE_TYPE (rhs),
720 build_fold_addr_expr_loc (loc, tem));
721 else if (TREE_CODE (ref) == MEM_REF
722 && integer_zerop (TREE_OPERAND (ref, 1)))
723 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
726 else if (TREE_CODE (rhs) == CONSTRUCTOR
727 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
728 && (CONSTRUCTOR_NELTS (rhs)
729 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
731 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
732 unsigned i;
733 tree val;
735 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
736 if (TREE_CODE (val) != INTEGER_CST
737 && TREE_CODE (val) != REAL_CST
738 && TREE_CODE (val) != FIXED_CST)
739 return NULL_TREE;
741 return build_vector_from_ctor (TREE_TYPE (rhs),
742 CONSTRUCTOR_ELTS (rhs));
745 else if (DECL_P (rhs))
746 return unshare_expr (get_symbol_constant_value (rhs));
748 /* If we couldn't fold the RHS, hand over to the generic
749 fold routines. */
750 if (result == NULL_TREE)
751 result = fold (rhs);
753 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
754 that may have been added by fold, and "useless" type
755 conversions that might now be apparent due to propagation. */
756 STRIP_USELESS_TYPE_CONVERSION (result);
758 if (result != rhs && valid_gimple_rhs_p (result))
759 return result;
761 return NULL_TREE;
763 break;
765 case GIMPLE_UNARY_RHS:
767 tree rhs = gimple_assign_rhs1 (stmt);
769 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
770 if (result)
772 /* If the operation was a conversion do _not_ mark a
773 resulting constant with TREE_OVERFLOW if the original
774 constant was not. These conversions have implementation
775 defined behavior and retaining the TREE_OVERFLOW flag
776 here would confuse later passes such as VRP. */
777 if (CONVERT_EXPR_CODE_P (subcode)
778 && TREE_CODE (result) == INTEGER_CST
779 && TREE_CODE (rhs) == INTEGER_CST)
780 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
782 STRIP_USELESS_TYPE_CONVERSION (result);
783 if (valid_gimple_rhs_p (result))
784 return result;
786 else if (CONVERT_EXPR_CODE_P (subcode)
787 && POINTER_TYPE_P (gimple_expr_type (stmt))
788 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
790 tree type = gimple_expr_type (stmt);
791 tree t = maybe_fold_offset_to_address (loc,
792 gimple_assign_rhs1 (stmt),
793 integer_zero_node, type);
794 if (t)
795 return t;
798 break;
800 case GIMPLE_BINARY_RHS:
801 /* Try to fold pointer addition. */
802 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
804 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
805 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
807 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
808 if (!useless_type_conversion_p
809 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
810 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
812 result = maybe_fold_stmt_addition (gimple_location (stmt),
813 type,
814 gimple_assign_rhs1 (stmt),
815 gimple_assign_rhs2 (stmt));
818 if (!result)
819 result = fold_binary_loc (loc, subcode,
820 TREE_TYPE (gimple_assign_lhs (stmt)),
821 gimple_assign_rhs1 (stmt),
822 gimple_assign_rhs2 (stmt));
824 if (result)
826 STRIP_USELESS_TYPE_CONVERSION (result);
827 if (valid_gimple_rhs_p (result))
828 return result;
830 break;
832 case GIMPLE_TERNARY_RHS:
833 result = fold_ternary_loc (loc, subcode,
834 TREE_TYPE (gimple_assign_lhs (stmt)),
835 gimple_assign_rhs1 (stmt),
836 gimple_assign_rhs2 (stmt),
837 gimple_assign_rhs3 (stmt));
839 if (result)
841 STRIP_USELESS_TYPE_CONVERSION (result);
842 if (valid_gimple_rhs_p (result))
843 return result;
845 break;
847 case GIMPLE_INVALID_RHS:
848 gcc_unreachable ();
851 return NULL_TREE;
854 /* Attempt to fold a conditional statement. Return true if any changes were
855 made. We only attempt to fold the condition expression, and do not perform
856 any transformation that would require alteration of the cfg. It is
857 assumed that the operands have been previously folded. */
859 static bool
860 fold_gimple_cond (gimple stmt)
862 tree result = fold_binary_loc (gimple_location (stmt),
863 gimple_cond_code (stmt),
864 boolean_type_node,
865 gimple_cond_lhs (stmt),
866 gimple_cond_rhs (stmt));
868 if (result)
870 STRIP_USELESS_TYPE_CONVERSION (result);
871 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
873 gimple_cond_set_condition_from_tree (stmt, result);
874 return true;
878 return false;
881 /* Convert EXPR into a GIMPLE value suitable for substitution on the
882 RHS of an assignment. Insert the necessary statements before
883 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL,
884 is replaced. If the call is expected to produce a result, then it
885 is replaced by an assignment of the new RHS to the result variable.
886 If the result is to be ignored, then the call is replaced by a
887 GIMPLE_NOP. A proper VDEF chain is retained by making the first
888 VUSE and the last VDEF of the whole sequence be the same as the replaced
889 statement and using new SSA names for stores in between. */
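/* For example, when a memcpy call with an unused result is folded to a
   single aggregate store, that store takes over the call's VDEF so the
   virtual SSA chain around the replaced statement stays intact. */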
891 void
892 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
894 tree lhs;
895 tree tmp = NULL_TREE; /* Silence warning. */
896 gimple stmt, new_stmt;
897 gimple_stmt_iterator i;
898 gimple_seq stmts = gimple_seq_alloc ();
899 struct gimplify_ctx gctx;
900 gimple last = NULL;
901 gimple laststore = NULL;
902 tree reaching_vuse;
904 stmt = gsi_stmt (*si_p);
906 gcc_assert (is_gimple_call (stmt));
908 lhs = gimple_call_lhs (stmt);
909 reaching_vuse = gimple_vuse (stmt);
911 push_gimplify_context (&gctx);
913 if (lhs == NULL_TREE)
915 gimplify_and_add (expr, &stmts);
916 /* We can end up folding a memcpy of an empty class assignment,
917 which gets optimized away by C++ gimplification. */
918 if (gimple_seq_empty_p (stmts))
920 pop_gimplify_context (NULL);
921 if (gimple_in_ssa_p (cfun))
923 unlink_stmt_vdef (stmt);
924 release_defs (stmt);
926 gsi_remove (si_p, true);
927 return;
930 else
931 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
933 pop_gimplify_context (NULL);
935 if (gimple_has_location (stmt))
936 annotate_all_with_location (stmts, gimple_location (stmt));
938 /* The replacement can expose previously unreferenced variables. */
939 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
941 if (last)
943 gsi_insert_before (si_p, last, GSI_NEW_STMT);
944 gsi_next (si_p);
946 new_stmt = gsi_stmt (i);
947 if (gimple_in_ssa_p (cfun))
949 find_new_referenced_vars (new_stmt);
950 mark_symbols_for_renaming (new_stmt);
952 /* If the new statement has a VUSE, update it with the exact SSA name we
953 know will reach this one. */
954 if (gimple_vuse (new_stmt))
956 /* If we've also seen a previous store, create a new VDEF for
957 the latter one, and make that the new reaching VUSE. */
958 if (laststore)
960 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
961 gimple_set_vdef (laststore, reaching_vuse);
962 update_stmt (laststore);
963 laststore = NULL;
965 gimple_set_vuse (new_stmt, reaching_vuse);
966 gimple_set_modified (new_stmt, true);
968 if (gimple_assign_single_p (new_stmt)
969 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
971 laststore = new_stmt;
973 last = new_stmt;
976 if (lhs == NULL_TREE)
978 /* If we replace a call without LHS that has a VDEF and our new
979 sequence ends with a store, we must make that store have the same
980 vdef in order not to break the sequencing. This can happen
981 for instance when folding memcpy calls into assignments. */
982 if (gimple_vdef (stmt) && laststore)
984 gimple_set_vdef (laststore, gimple_vdef (stmt));
985 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
986 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
987 update_stmt (laststore);
989 else if (gimple_in_ssa_p (cfun))
991 unlink_stmt_vdef (stmt);
992 release_defs (stmt);
994 new_stmt = last;
996 else
998 if (last)
1000 gsi_insert_before (si_p, last, GSI_NEW_STMT);
1001 gsi_next (si_p);
1003 if (laststore && is_gimple_reg (lhs))
1005 gimple_set_vdef (laststore, gimple_vdef (stmt));
1006 update_stmt (laststore);
1007 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1008 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
1009 laststore = NULL;
1011 else if (laststore)
1013 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
1014 gimple_set_vdef (laststore, reaching_vuse);
1015 update_stmt (laststore);
1016 laststore = NULL;
1018 new_stmt = gimple_build_assign (lhs, tmp);
1019 if (!is_gimple_reg (tmp))
1020 gimple_set_vuse (new_stmt, reaching_vuse);
1021 if (!is_gimple_reg (lhs))
1023 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1024 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1025 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
1027 else if (reaching_vuse == gimple_vuse (stmt))
1028 unlink_stmt_vdef (stmt);
1031 gimple_set_location (new_stmt, gimple_location (stmt));
1032 gsi_replace (si_p, new_stmt, false);
1035 /* Return the string length, maximum string length or maximum value of
1036 ARG in LENGTH.
1037 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1038 is not NULL and, for TYPE == 0, its value is not equal to the length
1039 we determine or if we are unable to determine the length or value,
1040 return false. VISITED is a bitmap of visited variables.
1041 TYPE is 0 if string length should be returned, 1 for maximum string
1042 length and 2 for maximum value ARG can have. */
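/* For example, if ARG is a PHI merging the strings "foo" and "quux":
   with TYPE == 1 the maximum length 4 is stored in *LENGTH, while with
   TYPE == 0 false is returned because the two lengths differ. */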
1044 static bool
1045 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
1047 tree var, val;
1048 gimple def_stmt;
1050 if (TREE_CODE (arg) != SSA_NAME)
1052 if (TREE_CODE (arg) == COND_EXPR)
1053 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
1054 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
1055 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1056 else if (TREE_CODE (arg) == ADDR_EXPR
1057 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1058 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1060 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1061 if (TREE_CODE (aop0) == INDIRECT_REF
1062 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1063 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1064 length, visited, type);
1067 if (type == 2)
1069 val = arg;
1070 if (TREE_CODE (val) != INTEGER_CST
1071 || tree_int_cst_sgn (val) < 0)
1072 return false;
1074 else
1075 val = c_strlen (arg, 1);
1076 if (!val)
1077 return false;
1079 if (*length)
1081 if (type > 0)
1083 if (TREE_CODE (*length) != INTEGER_CST
1084 || TREE_CODE (val) != INTEGER_CST)
1085 return false;
1087 if (tree_int_cst_lt (*length, val))
1088 *length = val;
1089 return true;
1091 else if (simple_cst_equal (val, *length) != 1)
1092 return false;
1095 *length = val;
1096 return true;
1099 /* If we were already here, break the infinite cycle. */
1100 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
1101 return true;
1103 var = arg;
1104 def_stmt = SSA_NAME_DEF_STMT (var);
1106 switch (gimple_code (def_stmt))
1108 case GIMPLE_ASSIGN:
1109 /* The RHS of the statement defining VAR must either have a
1110 constant length or come from another SSA_NAME with a constant
1111 length. */
1112 if (gimple_assign_single_p (def_stmt)
1113 || gimple_assign_unary_nop_p (def_stmt))
1115 tree rhs = gimple_assign_rhs1 (def_stmt);
1116 return get_maxval_strlen (rhs, length, visited, type);
1118 return false;
1120 case GIMPLE_PHI:
1122 /* All the arguments of the PHI node must have the same constant
1123 length. */
1124 unsigned i;
1126 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1128 tree arg = gimple_phi_arg (def_stmt, i)->def;
1130 /* If this PHI has itself as an argument, we cannot
1131 determine the string length of this argument. However,
1132 if we can find a constant string length for the other
1133 PHI args then we can still be sure that this is a
1134 constant string length. So be optimistic and just
1135 continue with the next argument. */
1136 if (arg == gimple_phi_result (def_stmt))
1137 continue;
1139 if (!get_maxval_strlen (arg, length, visited, type))
1140 return false;
1143 return true;
1145 default:
1146 return false;
1151 /* Fold builtin call in statement STMT. Returns a simplified tree.
1152 We may return a non-constant expression, including another call
1153 to a different function and with different arguments, e.g.,
1154 substituting memcpy for strcpy when the string length is known.
1155 Note that some builtins expand into inline code that may not
1156 be valid in GIMPLE. Callers must take care. */
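/* For example, a call strcpy (dst, src) where the length of src is
   known to be 3 can be folded to memcpy (dst, src, 4), which also
   copies the terminating nul. */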
1158 tree
1159 gimple_fold_builtin (gimple stmt)
1161 tree result, val[3];
1162 tree callee, a;
1163 int arg_idx, type;
1164 bitmap visited;
1165 bool ignore;
1166 int nargs;
1167 location_t loc = gimple_location (stmt);
1169 gcc_assert (is_gimple_call (stmt));
1171 ignore = (gimple_call_lhs (stmt) == NULL);
1173 /* First try the generic builtin folder. If that succeeds, return the
1174 result directly. */
1175 result = fold_call_stmt (stmt, ignore);
1176 if (result)
1178 if (ignore)
1179 STRIP_NOPS (result);
1180 return result;
1183 /* Ignore MD builtins. */
1184 callee = gimple_call_fndecl (stmt);
1185 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
1186 return NULL_TREE;
1188 /* If the builtin could not be folded, and it has no argument list,
1189 we're done. */
1190 nargs = gimple_call_num_args (stmt);
1191 if (nargs == 0)
1192 return NULL_TREE;
1194 /* Limit the work only for builtins we know how to simplify. */
1195 switch (DECL_FUNCTION_CODE (callee))
1197 case BUILT_IN_STRLEN:
1198 case BUILT_IN_FPUTS:
1199 case BUILT_IN_FPUTS_UNLOCKED:
1200 arg_idx = 0;
1201 type = 0;
1202 break;
1203 case BUILT_IN_STRCPY:
1204 case BUILT_IN_STRNCPY:
1205 arg_idx = 1;
1206 type = 0;
1207 break;
1208 case BUILT_IN_MEMCPY_CHK:
1209 case BUILT_IN_MEMPCPY_CHK:
1210 case BUILT_IN_MEMMOVE_CHK:
1211 case BUILT_IN_MEMSET_CHK:
1212 case BUILT_IN_STRNCPY_CHK:
1213 arg_idx = 2;
1214 type = 2;
1215 break;
1216 case BUILT_IN_STRCPY_CHK:
1217 case BUILT_IN_STPCPY_CHK:
1218 arg_idx = 1;
1219 type = 1;
1220 break;
1221 case BUILT_IN_SNPRINTF_CHK:
1222 case BUILT_IN_VSNPRINTF_CHK:
1223 arg_idx = 1;
1224 type = 2;
1225 break;
1226 default:
1227 return NULL_TREE;
1230 if (arg_idx >= nargs)
1231 return NULL_TREE;
1233 /* Try to use the dataflow information gathered by the CCP process. */
1234 visited = BITMAP_ALLOC (NULL);
1235 bitmap_clear (visited);
1237 memset (val, 0, sizeof (val));
1238 a = gimple_call_arg (stmt, arg_idx);
1239 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
1240 val[arg_idx] = NULL_TREE;
1242 BITMAP_FREE (visited);
1244 result = NULL_TREE;
1245 switch (DECL_FUNCTION_CODE (callee))
1247 case BUILT_IN_STRLEN:
1248 if (val[0] && nargs == 1)
1250 tree new_val =
1251 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
1253 /* If the result is not a valid gimple value, or not a cast
1254 of a valid gimple value, then we cannot use the result. */
1255 if (is_gimple_val (new_val)
1256 || (CONVERT_EXPR_P (new_val)
1257 && is_gimple_val (TREE_OPERAND (new_val, 0))))
1258 return new_val;
1260 break;
1262 case BUILT_IN_STRCPY:
1263 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
1264 result = fold_builtin_strcpy (loc, callee,
1265 gimple_call_arg (stmt, 0),
1266 gimple_call_arg (stmt, 1),
1267 val[1]);
1268 break;
1270 case BUILT_IN_STRNCPY:
1271 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1272 result = fold_builtin_strncpy (loc, callee,
1273 gimple_call_arg (stmt, 0),
1274 gimple_call_arg (stmt, 1),
1275 gimple_call_arg (stmt, 2),
1276 val[1]);
1277 break;
1279 case BUILT_IN_FPUTS:
1280 if (nargs == 2)
1281 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1282 gimple_call_arg (stmt, 1),
1283 ignore, false, val[0]);
1284 break;
1286 case BUILT_IN_FPUTS_UNLOCKED:
1287 if (nargs == 2)
1288 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1289 gimple_call_arg (stmt, 1),
1290 ignore, true, val[0]);
1291 break;
1293 case BUILT_IN_MEMCPY_CHK:
1294 case BUILT_IN_MEMPCPY_CHK:
1295 case BUILT_IN_MEMMOVE_CHK:
1296 case BUILT_IN_MEMSET_CHK:
1297 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1298 result = fold_builtin_memory_chk (loc, callee,
1299 gimple_call_arg (stmt, 0),
1300 gimple_call_arg (stmt, 1),
1301 gimple_call_arg (stmt, 2),
1302 gimple_call_arg (stmt, 3),
1303 val[2], ignore,
1304 DECL_FUNCTION_CODE (callee));
1305 break;
1307 case BUILT_IN_STRCPY_CHK:
1308 case BUILT_IN_STPCPY_CHK:
1309 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1310 result = fold_builtin_stxcpy_chk (loc, callee,
1311 gimple_call_arg (stmt, 0),
1312 gimple_call_arg (stmt, 1),
1313 gimple_call_arg (stmt, 2),
1314 val[1], ignore,
1315 DECL_FUNCTION_CODE (callee));
1316 break;
1318 case BUILT_IN_STRNCPY_CHK:
1319 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1320 result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
1321 gimple_call_arg (stmt, 1),
1322 gimple_call_arg (stmt, 2),
1323 gimple_call_arg (stmt, 3),
1324 val[2]);
1325 break;
1327 case BUILT_IN_SNPRINTF_CHK:
1328 case BUILT_IN_VSNPRINTF_CHK:
1329 if (val[1] && is_gimple_val (val[1]))
1330 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1331 DECL_FUNCTION_CODE (callee));
1332 break;
1334 default:
1335 gcc_unreachable ();
1338 if (result && ignore)
1339 result = fold_ignored_result (result);
1340 return result;
1343 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
1344 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
1345 KNOWN_BINFO carries the binfo describing the true type of
1346 OBJ_TYPE_REF_OBJECT(REF). If a call to the function must be accompanied
1347 by a this adjustment, the constant which should be added to the this
1348 pointer is stored to *DELTA. */
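/* For example, TOKEN == 2 selects the third entry of BINFO_VIRTUALS
   (stepping by the descriptor count on targets where
   TARGET_VTABLE_USES_DESCRIPTORS is nonzero); its BV_FN is returned
   and its BV_DELTA stored to *DELTA. */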
1351 tree
1352 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
1353 tree *delta)
1355 HOST_WIDE_INT i;
1356 tree v, fndecl;
1358 v = BINFO_VIRTUALS (known_binfo);
1359 /* If there are no virtual methods, leave the OBJ_TYPE_REF alone. */
1360 if (!v)
1361 return NULL_TREE;
1362 i = 0;
1363 while (i != token)
1365 i += (TARGET_VTABLE_USES_DESCRIPTORS
1366 ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
1367 v = TREE_CHAIN (v);
1370 /* If BV_VCALL_INDEX is non-NULL, give up. */
1371 if (TREE_TYPE (v))
1372 return NULL_TREE;
1374 fndecl = TREE_VALUE (v);
1376 /* When the cgraph node is missing and the function is not public, we cannot
1377 devirtualize. This can happen in WHOPR when the actual method
1378 ends up in another partition, because we found the devirtualization
1379 possibility too late. */
1380 if (!can_refer_decl_in_current_unit_p (TREE_VALUE (v)))
1381 return NULL_TREE;
1383 *delta = TREE_PURPOSE (v);
1384 gcc_checking_assert (host_integerp (*delta, 0));
1385 return fndecl;
1388 /* Generate code adjusting the first parameter of a call statement determined
1389 by GSI by DELTA. */
1391 void
1392 gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
1394 gimple call_stmt = gsi_stmt (*gsi);
1395 tree parm, tmp;
1396 gimple new_stmt;
1398 delta = fold_convert (sizetype, delta);
1399 gcc_assert (gimple_call_num_args (call_stmt) >= 1);
1400 parm = gimple_call_arg (call_stmt, 0);
1401 gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
1402 tmp = create_tmp_var (TREE_TYPE (parm), NULL);
1403 add_referenced_var (tmp);
1405 tmp = make_ssa_name (tmp, NULL);
1406 new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
1407 SSA_NAME_DEF_STMT (tmp) = new_stmt;
1408 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1409 gimple_call_set_arg (call_stmt, 0, tmp);
1412 /* Return a binfo to be used for devirtualization of calls based on an object
1413 represented by a declaration (i.e. a global or automatically allocated one)
1414 or NULL if it cannot be found or is not safe. CST is expected to be an
1415 ADDR_EXPR of such an object or the function will return NULL. Currently it is
1416 safe to use such a binfo only if it has no base binfo (i.e. no ancestors). */
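/* For example, for a global "struct A a;" where A has a virtual method
   and no bases, CST = &a yields TYPE_BINFO (A); if A had base binfos,
   NULL_TREE would be returned instead. */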
1418 tree
1419 gimple_extract_devirt_binfo_from_cst (tree cst)
1421 HOST_WIDE_INT offset, size, max_size;
1422 tree base, type, expected_type, binfo;
1423 bool last_artificial = false;
1425 if (!flag_devirtualize
1426 || TREE_CODE (cst) != ADDR_EXPR
1427 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
1428 return NULL_TREE;
1430 cst = TREE_OPERAND (cst, 0);
1431 expected_type = TREE_TYPE (cst);
1432 base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
1433 type = TREE_TYPE (base);
1434 if (!DECL_P (base)
1435 || max_size == -1
1436 || max_size != size
1437 || TREE_CODE (type) != RECORD_TYPE)
1438 return NULL_TREE;
1440 /* Find the sub-object the constant actually refers to and mark whether it is
1441 an artificial one (as opposed to a user-defined one). */
1442 while (true)
1444 HOST_WIDE_INT pos, size;
1445 tree fld;
1447 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
1448 break;
1449 if (offset < 0)
1450 return NULL_TREE;
1452 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1454 if (TREE_CODE (fld) != FIELD_DECL)
1455 continue;
1457 pos = int_bit_position (fld);
1458 size = tree_low_cst (DECL_SIZE (fld), 1);
1459 if (pos <= offset && (pos + size) > offset)
1460 break;
1462 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
1463 return NULL_TREE;
1465 last_artificial = DECL_ARTIFICIAL (fld);
1466 type = TREE_TYPE (fld);
1467 offset -= pos;
1469 /* Artificial sub-objects are ancestors; we do not want to use them for
1470 devirtualization, at least not here. */
1471 if (last_artificial)
1472 return NULL_TREE;
1473 binfo = TYPE_BINFO (type);
1474 if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
1475 return NULL_TREE;
1476 else
1477 return binfo;
1480 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1481 The statement may be replaced by another statement, e.g., if the call
1482 simplifies to a constant value. Return true if any changes were made.
1483 It is assumed that the operands have been previously folded. */
1485 bool
1486 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1488 gimple stmt = gsi_stmt (*gsi);
1489 tree callee;
1491 /* Check for builtins that CCP can handle using information not
1492 available in the generic fold routines. */
1493 callee = gimple_call_fndecl (stmt);
1494 if (!inplace && callee && DECL_BUILT_IN (callee))
1496 tree result = gimple_fold_builtin (stmt);
1498 if (result)
1500 if (!update_call_from_tree (gsi, result))
1501 gimplify_and_update_call_from_tree (gsi, result);
1502 return true;
1506 /* Check for virtual calls that became direct calls. */
1507 callee = gimple_call_fn (stmt);
1508 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1510 tree binfo, fndecl, delta, obj;
1511 HOST_WIDE_INT token;
1513 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1515 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1516 return true;
1519 obj = OBJ_TYPE_REF_OBJECT (callee);
1520 binfo = gimple_extract_devirt_binfo_from_cst (obj);
1521 if (!binfo)
1522 return false;
1523 token = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1524 fndecl = gimple_get_virt_method_for_binfo (token, binfo, &delta);
1525 if (!fndecl)
1526 return false;
1527 gcc_assert (integer_zerop (delta));
1528 gimple_call_set_fndecl (stmt, fndecl);
1529 return true;
1532 return false;
1535 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1536 distinguishes both cases. */
1538 static bool
1539 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1541 bool changed = false;
1542 gimple stmt = gsi_stmt (*gsi);
1543 unsigned i;
1544 gimple_stmt_iterator gsinext = *gsi;
1545 gimple next_stmt;
1547 gsi_next (&gsinext);
1548 next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);
1550 /* Fold the main computation performed by the statement. */
1551 switch (gimple_code (stmt))
1553 case GIMPLE_ASSIGN:
1555 unsigned old_num_ops = gimple_num_ops (stmt);
1556 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1557 tree lhs = gimple_assign_lhs (stmt);
1558 tree new_rhs;
1559 /* First canonicalize operand order. This avoids building new
1560 trees if this is the only thing fold would later do. */
1561 if ((commutative_tree_code (subcode)
1562 || commutative_ternary_tree_code (subcode))
1563 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1564 gimple_assign_rhs2 (stmt), false))
1566 tree tem = gimple_assign_rhs1 (stmt);
1567 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1568 gimple_assign_set_rhs2 (stmt, tem);
1569 changed = true;
1571 new_rhs = fold_gimple_assign (gsi);
1572 if (new_rhs
1573 && !useless_type_conversion_p (TREE_TYPE (lhs),
1574 TREE_TYPE (new_rhs)))
1575 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1576 if (new_rhs
1577 && (!inplace
1578 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1580 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1581 changed = true;
1583 break;
1586 case GIMPLE_COND:
1587 changed |= fold_gimple_cond (stmt);
1588 break;
1590 case GIMPLE_CALL:
1591 /* Fold *& in call arguments. */
1592 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1593 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1595 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1596 if (tmp)
1598 gimple_call_set_arg (stmt, i, tmp);
1599 changed = true;
1602 changed |= gimple_fold_call (gsi, inplace);
1603 break;
1605 case GIMPLE_ASM:
1606 /* Fold *& in asm operands. */
1607 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1609 tree link = gimple_asm_output_op (stmt, i);
1610 tree op = TREE_VALUE (link);
1611 if (REFERENCE_CLASS_P (op)
1612 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1614 TREE_VALUE (link) = op;
1615 changed = true;
1618 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1620 tree link = gimple_asm_input_op (stmt, i);
1621 tree op = TREE_VALUE (link);
1622 if (REFERENCE_CLASS_P (op)
1623 && (op = maybe_fold_reference (op, false)) != NULL_TREE)
1625 TREE_VALUE (link) = op;
1626 changed = true;
1629 break;
1631 case GIMPLE_DEBUG:
1632 if (gimple_debug_bind_p (stmt))
1634 tree val = gimple_debug_bind_get_value (stmt);
1635 if (val
1636 && REFERENCE_CLASS_P (val))
1638 tree tem = maybe_fold_reference (val, false);
1639 if (tem)
1641 gimple_debug_bind_set_value (stmt, tem);
1642 changed = true;
1646 break;
1648 default:;
1651 /* If stmt folds into nothing and it was the last stmt in a bb,
1652 don't call gsi_stmt. */
1653 if (gsi_end_p (*gsi))
1655 gcc_assert (next_stmt == NULL);
1656 return changed;
1659 stmt = gsi_stmt (*gsi);
1661 /* Fold *& on the lhs. Don't do this if stmt folded into nothing,
1662 as we'd be changing the next stmt. */
1663 if (gimple_has_lhs (stmt) && stmt != next_stmt)
1665 tree lhs = gimple_get_lhs (stmt);
1666 if (lhs && REFERENCE_CLASS_P (lhs))
1668 tree new_lhs = maybe_fold_reference (lhs, true);
1669 if (new_lhs)
1671 gimple_set_lhs (stmt, new_lhs);
1672 changed = true;
1677 return changed;
1680 /* Fold the statement pointed to by GSI. In some cases, this function may
1681 replace the whole statement with a new one. Returns true iff folding
1682 makes any changes.
1683 The statement pointed to by GSI should be in valid gimple form but may
1684 be in an unfolded state resulting from, for example, constant propagation,
1685 which can produce *&x = 0. */
1687 bool
1688 fold_stmt (gimple_stmt_iterator *gsi)
1690 return fold_stmt_1 (gsi, false);
1693 /* Perform the minimal folding on statement STMT. Only operations like
1694 *&x created by constant propagation are handled. The statement cannot
1695 be replaced with a new one. Return true if the statement was
1696 changed, false otherwise.
1697 The statement STMT should be in valid gimple form but may
1698 be in an unfolded state resulting from, for example, constant propagation,
1699 which can produce *&x = 0. */
1701 bool
1702 fold_stmt_inplace (gimple stmt)
1704 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1705 bool changed = fold_stmt_1 (&gsi, true);
1706 gcc_assert (gsi_stmt (gsi) == stmt);
1707 return changed;
1710 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1711 if EXPR is null or we don't know how.
1712 If non-null, the result always has boolean type. */
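/* For example, canonicalize_bool (a_1 < b_2, true) yields a_1 >= b_2,
   and canonicalize_bool (x_3, false) for a non-boolean SSA name x_3
   yields x_3 != 0. */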
1714 static tree
1715 canonicalize_bool (tree expr, bool invert)
1717 if (!expr)
1718 return NULL_TREE;
1719 else if (invert)
1721 if (integer_nonzerop (expr))
1722 return boolean_false_node;
1723 else if (integer_zerop (expr))
1724 return boolean_true_node;
1725 else if (TREE_CODE (expr) == SSA_NAME)
1726 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1727 build_int_cst (TREE_TYPE (expr), 0));
1728 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1729 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1730 boolean_type_node,
1731 TREE_OPERAND (expr, 0),
1732 TREE_OPERAND (expr, 1));
1733 else
1734 return NULL_TREE;
1736 else
1738 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1739 return expr;
1740 if (integer_nonzerop (expr))
1741 return boolean_true_node;
1742 else if (integer_zerop (expr))
1743 return boolean_false_node;
1744 else if (TREE_CODE (expr) == SSA_NAME)
1745 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1746 build_int_cst (TREE_TYPE (expr), 0));
1747 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1748 return fold_build2 (TREE_CODE (expr),
1749 boolean_type_node,
1750 TREE_OPERAND (expr, 0),
1751 TREE_OPERAND (expr, 1));
1752 else
1753 return NULL_TREE;
1757 /* Check to see if a boolean expression EXPR is logically equivalent to the
1758 comparison (OP1 CODE OP2). Check for various identities involving
1759 SSA_NAMEs. */
1761 static bool
1762 same_bool_comparison_p (const_tree expr, enum tree_code code,
1763 const_tree op1, const_tree op2)
1765 gimple s;
1767 /* The obvious case. */
1768 if (TREE_CODE (expr) == code
1769 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1770 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1771 return true;
1773 /* Check for comparing (name, name != 0) and the case where expr
1774 is an SSA_NAME with a definition matching the comparison. */
1775 if (TREE_CODE (expr) == SSA_NAME
1776 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1778 if (operand_equal_p (expr, op1, 0))
1779 return ((code == NE_EXPR && integer_zerop (op2))
1780 || (code == EQ_EXPR && integer_nonzerop (op2)));
1781 s = SSA_NAME_DEF_STMT (expr);
1782 if (is_gimple_assign (s)
1783 && gimple_assign_rhs_code (s) == code
1784 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1785 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1786 return true;
1789 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1790 of name is a comparison, recurse. */
1791 if (TREE_CODE (op1) == SSA_NAME
1792 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1794 s = SSA_NAME_DEF_STMT (op1);
1795 if (is_gimple_assign (s)
1796 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1798 enum tree_code c = gimple_assign_rhs_code (s);
1799 if ((c == NE_EXPR && integer_zerop (op2))
1800 || (c == EQ_EXPR && integer_nonzerop (op2)))
1801 return same_bool_comparison_p (expr, c,
1802 gimple_assign_rhs1 (s),
1803 gimple_assign_rhs2 (s));
1804 if ((c == EQ_EXPR && integer_zerop (op2))
1805 || (c == NE_EXPR && integer_nonzerop (op2)))
1806 return same_bool_comparison_p (expr,
1807 invert_tree_comparison (c, false),
1808 gimple_assign_rhs1 (s),
1809 gimple_assign_rhs2 (s));
1812 return false;
1815 /* Check to see if two boolean expressions OP1 and OP2 are logically
1816 equivalent. */
1818 static bool
1819 same_bool_result_p (const_tree op1, const_tree op2)
1821 /* Simple cases first. */
1822 if (operand_equal_p (op1, op2, 0))
1823 return true;
1825 /* Check the cases where at least one of the operands is a comparison.
1826 These are a bit smarter than operand_equal_p in that they apply some
1827 identities on SSA_NAMEs. */
1828 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1829 && same_bool_comparison_p (op1, TREE_CODE (op2),
1830 TREE_OPERAND (op2, 0),
1831 TREE_OPERAND (op2, 1)))
1832 return true;
1833 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1834 && same_bool_comparison_p (op2, TREE_CODE (op1),
1835 TREE_OPERAND (op1, 0),
1836 TREE_OPERAND (op1, 1)))
1837 return true;
1839 /* Default case. */
1840 return false;
1843 /* Forward declarations for some mutually recursive functions. */
1845 static tree
1846 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1847 enum tree_code code2, tree op2a, tree op2b);
1848 static tree
1849 and_var_with_comparison (tree var, bool invert,
1850 enum tree_code code2, tree op2a, tree op2b);
1851 static tree
1852 and_var_with_comparison_1 (gimple stmt,
1853 enum tree_code code2, tree op2a, tree op2b);
1854 static tree
1855 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1856 enum tree_code code2, tree op2a, tree op2b);
1857 static tree
1858 or_var_with_comparison (tree var, bool invert,
1859 enum tree_code code2, tree op2a, tree op2b);
1860 static tree
1861 or_var_with_comparison_1 (gimple stmt,
1862 enum tree_code code2, tree op2a, tree op2b);
1864 /* Helper function for and_comparisons_1: try to simplify the AND of the
1865 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1866 If INVERT is true, invert the value of the VAR before doing the AND.
1867 Return NULL_TREE if we can't simplify this to a single expression. */
1869 static tree
1870 and_var_with_comparison (tree var, bool invert,
1871 enum tree_code code2, tree op2a, tree op2b)
1873 tree t;
1874 gimple stmt = SSA_NAME_DEF_STMT (var);
1876 /* We can only deal with variables whose definitions are assignments. */
1877 if (!is_gimple_assign (stmt))
1878 return NULL_TREE;
1880 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1881 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1882 Then we only have to consider the simpler non-inverted cases. */
1883 if (invert)
1884 t = or_var_with_comparison_1 (stmt,
1885 invert_tree_comparison (code2, false),
1886 op2a, op2b);
1887 else
1888 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1889 return canonicalize_bool (t, invert);
1892 /* Try to simplify the AND of the ssa variable defined by the assignment
1893 STMT with the comparison specified by (OP2A CODE2 OP2B).
1894 Return NULL_TREE if we can't simplify this to a single expression. */
1896 static tree
1897 and_var_with_comparison_1 (gimple stmt,
1898 enum tree_code code2, tree op2a, tree op2b)
1900 tree var = gimple_assign_lhs (stmt);
1901 tree true_test_var = NULL_TREE;
1902 tree false_test_var = NULL_TREE;
1903 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1905 /* Check for identities like (var AND (var == 0)) => false. */
1906 if (TREE_CODE (op2a) == SSA_NAME
1907 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1909 if ((code2 == NE_EXPR && integer_zerop (op2b))
1910 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1912 true_test_var = op2a;
1913 if (var == true_test_var)
1914 return var;
1916 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1917 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1919 false_test_var = op2a;
1920 if (var == false_test_var)
1921 return boolean_false_node;
1925 /* If the definition is a comparison, recurse on it. */
1926 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1928 tree t = and_comparisons_1 (innercode,
1929 gimple_assign_rhs1 (stmt),
1930 gimple_assign_rhs2 (stmt),
1931 code2,
1932 op2a,
1933 op2b);
1934 if (t)
1935 return t;
1938 /* If the definition is an AND or OR expression, we may be able to
1939 simplify by reassociating. */
1940 if (innercode == TRUTH_AND_EXPR
1941 || innercode == TRUTH_OR_EXPR
1942 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1943 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
1945 tree inner1 = gimple_assign_rhs1 (stmt);
1946 tree inner2 = gimple_assign_rhs2 (stmt);
1947 gimple s;
1948 tree t;
1949 tree partial = NULL_TREE;
1950 bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);
1952 /* Check for boolean identities that don't require recursive examination
1953 of inner1/inner2:
1954 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1955 inner1 AND (inner1 OR inner2) => inner1
1956 !inner1 AND (inner1 AND inner2) => false
1957 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1958 Likewise for similar cases involving inner2. */
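/* A worked instance of the first identity (illustrative only): if var is
   defined as var = a & b and the second test is (a != 0), then
   true_test_var == a == inner1; ANDing var with a test that a is already
   known true changes nothing, so var itself is returned.  */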
1959 if (inner1 == true_test_var)
1960 return (is_and ? var : inner1);
1961 else if (inner2 == true_test_var)
1962 return (is_and ? var : inner2);
1963 else if (inner1 == false_test_var)
1964 return (is_and
1965 ? boolean_false_node
1966 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1967 else if (inner2 == false_test_var)
1968 return (is_and
1969 ? boolean_false_node
1970 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1972 /* Next, redistribute/reassociate the AND across the inner tests.
1973 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1974 if (TREE_CODE (inner1) == SSA_NAME
1975 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1976 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1977 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1978 gimple_assign_rhs1 (s),
1979 gimple_assign_rhs2 (s),
1980 code2, op2a, op2b)))
1982 /* Handle the AND case, where we are reassociating:
1983 (inner1 AND inner2) AND (op2a code2 op2b)
1984 => (t AND inner2)
1985 If the partial result t is a constant, we win. Otherwise
1986 continue on to try reassociating with the other inner test. */
1987 if (is_and)
1989 if (integer_onep (t))
1990 return inner2;
1991 else if (integer_zerop (t))
1992 return boolean_false_node;
1995 /* Handle the OR case, where we are redistributing:
1996 (inner1 OR inner2) AND (op2a code2 op2b)
1997 => (t OR (inner2 AND (op2a code2 op2b))) */
1998 else if (integer_onep (t))
1999 return boolean_true_node;
2001 /* Save partial result for later. */
2002 partial = t;
2005 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
2006 if (TREE_CODE (inner2) == SSA_NAME
2007 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2008 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2009 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
2010 gimple_assign_rhs1 (s),
2011 gimple_assign_rhs2 (s),
2012 code2, op2a, op2b)))
2014 /* Handle the AND case, where we are reassociating:
2015 (inner1 AND inner2) AND (op2a code2 op2b)
2016 => (inner1 AND t) */
2017 if (is_and)
2019 if (integer_onep (t))
2020 return inner1;
2021 else if (integer_zerop (t))
2022 return boolean_false_node;
2023 /* If both are the same, we can apply the identity
2024 (x AND x) == x. */
2025 else if (partial && same_bool_result_p (t, partial))
2026 return t;
2029 /* Handle the OR case, where we are redistributing:
2030 (inner1 OR inner2) AND (op2a code2 op2b)
2031 => (t OR (inner1 AND (op2a code2 op2b)))
2032 => (t OR partial) */
2033 else
2035 if (integer_onep (t))
2036 return boolean_true_node;
2037 else if (partial)
2039 /* We already got a simplification for the other
2040 operand to the redistributed OR expression. The
2041 interesting case is when at least one is false.
2042 Or, if both are the same, we can apply the identity
2043 (x OR x) == x. */
2044 if (integer_zerop (partial))
2045 return t;
2046 else if (integer_zerop (t))
2047 return partial;
2048 else if (same_bool_result_p (t, partial))
2049 return t;
2054 return NULL_TREE;
2057 /* Try to simplify the AND of two comparisons defined by
2058 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2059 If this can be done without constructing an intermediate value,
2060 return the resulting tree; otherwise NULL_TREE is returned.
2061 This function is deliberately asymmetric as it recurses on SSA_DEFs
2062 in the first comparison but not the second. */
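/* For instance (a sketch, not from a testcase): with identical integral
   operands x and y,
     (x <= y) AND (x >= y)  =>  (x == y)
   via combine_comparisons, with no intermediate value required.  */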
2064 static tree
2065 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2066 enum tree_code code2, tree op2a, tree op2b)
2068 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
2069 if (operand_equal_p (op1a, op2a, 0)
2070 && operand_equal_p (op1b, op2b, 0))
2072 tree t = combine_comparisons (UNKNOWN_LOCATION,
2073 TRUTH_ANDIF_EXPR, code1, code2,
2074 boolean_type_node, op1a, op1b);
2075 if (t)
2076 return t;
2079 /* Likewise the swapped case of the above. */
2080 if (operand_equal_p (op1a, op2b, 0)
2081 && operand_equal_p (op1b, op2a, 0))
2083 tree t = combine_comparisons (UNKNOWN_LOCATION,
2084 TRUTH_ANDIF_EXPR, code1,
2085 swap_tree_comparison (code2),
2086 boolean_type_node, op1a, op1b);
2087 if (t)
2088 return t;
2091 /* If both comparisons are of the same value against constants, we might
2092 be able to merge them. */
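/* Two illustrative merges of comparisons of x against constants
   (hypothetical examples):
     (x == 3) AND (x < 5)  =>  (x == 3)   since 3 satisfies x < 5
     (x == 3) AND (x > 5)  =>  false      since 3 does not
   The cases below enumerate these outcomes from the sign of CMP, the
   comparison of op1b against op2b.  */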
2093 if (operand_equal_p (op1a, op2a, 0)
2094 && TREE_CODE (op1b) == INTEGER_CST
2095 && TREE_CODE (op2b) == INTEGER_CST)
2097 int cmp = tree_int_cst_compare (op1b, op2b);
2099 /* If we have (op1a == op1b), we should either be able to
2100 return that or FALSE, depending on whether the constant op1b
2101 also satisfies the other comparison against op2b. */
2102 if (code1 == EQ_EXPR)
2104 bool done = true;
2105 bool val;
2106 switch (code2)
2108 case EQ_EXPR: val = (cmp == 0); break;
2109 case NE_EXPR: val = (cmp != 0); break;
2110 case LT_EXPR: val = (cmp < 0); break;
2111 case GT_EXPR: val = (cmp > 0); break;
2112 case LE_EXPR: val = (cmp <= 0); break;
2113 case GE_EXPR: val = (cmp >= 0); break;
2114 default: done = false;
2116 if (done)
2118 if (val)
2119 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2120 else
2121 return boolean_false_node;
2124 /* Likewise if the second comparison is an == comparison. */
2125 else if (code2 == EQ_EXPR)
2127 bool done = true;
2128 bool val;
2129 switch (code1)
2131 case EQ_EXPR: val = (cmp == 0); break;
2132 case NE_EXPR: val = (cmp != 0); break;
2133 case LT_EXPR: val = (cmp > 0); break;
2134 case GT_EXPR: val = (cmp < 0); break;
2135 case LE_EXPR: val = (cmp >= 0); break;
2136 case GE_EXPR: val = (cmp <= 0); break;
2137 default: done = false;
2139 if (done)
2141 if (val)
2142 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2143 else
2144 return boolean_false_node;
2148 /* Same business with inequality tests. */
2149 else if (code1 == NE_EXPR)
2151 bool val;
2152 switch (code2)
2154 case EQ_EXPR: val = (cmp != 0); break;
2155 case NE_EXPR: val = (cmp == 0); break;
2156 case LT_EXPR: val = (cmp >= 0); break;
2157 case GT_EXPR: val = (cmp <= 0); break;
2158 case LE_EXPR: val = (cmp > 0); break;
2159 case GE_EXPR: val = (cmp < 0); break;
2160 default:
2161 val = false;
2163 if (val)
2164 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2166 else if (code2 == NE_EXPR)
2168 bool val;
2169 switch (code1)
2171 case EQ_EXPR: val = (cmp == 0); break;
2172 case NE_EXPR: val = (cmp != 0); break;
2173 case LT_EXPR: val = (cmp <= 0); break;
2174 case GT_EXPR: val = (cmp >= 0); break;
2175 case LE_EXPR: val = (cmp < 0); break;
2176 case GE_EXPR: val = (cmp > 0); break;
2177 default:
2178 val = false;
2180 if (val)
2181 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2184 /* Choose the more restrictive of two < or <= comparisons. */
2185 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2186 && (code2 == LT_EXPR || code2 == LE_EXPR))
2188 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2189 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2190 else
2191 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2194 /* Likewise choose the more restrictive of two > or >= comparisons. */
2195 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2196 && (code2 == GT_EXPR || code2 == GE_EXPR))
2198 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2199 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2200 else
2201 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2204 /* Check for singleton ranges. */
2205 else if (cmp == 0
2206 && ((code1 == LE_EXPR && code2 == GE_EXPR)
2207 || (code1 == GE_EXPR && code2 == LE_EXPR)))
2208 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
2210 /* Check for disjoint ranges. */
2211 else if (cmp <= 0
2212 && (code1 == LT_EXPR || code1 == LE_EXPR)
2213 && (code2 == GT_EXPR || code2 == GE_EXPR))
2214 return boolean_false_node;
2215 else if (cmp >= 0
2216 && (code1 == GT_EXPR || code1 == GE_EXPR)
2217 && (code2 == LT_EXPR || code2 == LE_EXPR))
2218 return boolean_false_node;
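/* Illustrative outcomes of the two range checks above (hypothetical):
     (x <= 7) AND (x >= 7)  =>  (x == 7)   a singleton range
     (x < 3) AND (x > 5)    =>  false      disjoint ranges  */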
2221 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2222 NAME's definition is a truth value. See if there are any simplifications
2223 that can be done against the NAME's definition. */
2224 if (TREE_CODE (op1a) == SSA_NAME
2225 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2226 && (integer_zerop (op1b) || integer_onep (op1b)))
2228 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2229 || (code1 == NE_EXPR && integer_onep (op1b)));
2230 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2231 switch (gimple_code (stmt))
2233 case GIMPLE_ASSIGN:
2234 /* Try to simplify by copy-propagating the definition. */
2235 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
2237 case GIMPLE_PHI:
2238 /* If every argument to the PHI produces the same result when
2239 ANDed with the second comparison, we win.
2240 Do not do this unless the type is bool since we need a bool
2241 result here anyway. */
2242 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2244 tree result = NULL_TREE;
2245 unsigned i;
2246 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2248 tree arg = gimple_phi_arg_def (stmt, i);
2250 /* If this PHI has itself as an argument, ignore it.
2251 If all the other args produce the same result,
2252 we're still OK. */
2253 if (arg == gimple_phi_result (stmt))
2254 continue;
2255 else if (TREE_CODE (arg) == INTEGER_CST)
2257 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
2259 if (!result)
2260 result = boolean_false_node;
2261 else if (!integer_zerop (result))
2262 return NULL_TREE;
2264 else if (!result)
2265 result = fold_build2 (code2, boolean_type_node,
2266 op2a, op2b);
2267 else if (!same_bool_comparison_p (result,
2268 code2, op2a, op2b))
2269 return NULL_TREE;
2271 else if (TREE_CODE (arg) == SSA_NAME
2272 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2274 tree temp;
2275 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2276 /* In simple cases we can look through PHI nodes,
2277 but we have to be careful with loops.
2278 See PR49073. */
2279 if (! dom_info_available_p (CDI_DOMINATORS)
2280 || gimple_bb (def_stmt) == gimple_bb (stmt)
2281 || dominated_by_p (CDI_DOMINATORS,
2282 gimple_bb (def_stmt),
2283 gimple_bb (stmt)))
2284 return NULL_TREE;
2285 temp = and_var_with_comparison (arg, invert, code2,
2286 op2a, op2b);
2287 if (!temp)
2288 return NULL_TREE;
2289 else if (!result)
2290 result = temp;
2291 else if (!same_bool_result_p (result, temp))
2292 return NULL_TREE;
2294 else
2295 return NULL_TREE;
2297 return result;
2300 default:
2301 break;
2304 return NULL_TREE;
2307 /* Try to simplify the AND of two comparisons, specified by
2308 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2309 If this can be simplified to a single expression (without
2310 introducing new SSA variables to hold intermediate values),
2311 return the resulting tree. Otherwise return NULL_TREE.
2312 If the result expression is non-null, it has boolean type. */
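/* A hypothetical caller sketch (not from this file): a pass that sees the
   conjunction (a > 0) && (a > 10) could ask

     t = maybe_fold_and_comparisons (GT_EXPR, a, build_int_cst (type, 0),
                                     GT_EXPR, a, build_int_cst (type, 10));

   where TYPE is the type of A, and substitute the non-null result for the
   whole conjunction; here the more restrictive test (a > 10) is
   returned.  */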
2314 tree
2315 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2316 enum tree_code code2, tree op2a, tree op2b)
2318 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2319 if (t)
2320 return t;
2321 else
2322 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2325 /* Helper function for or_comparisons_1: try to simplify the OR of the
2326 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2327 If INVERT is true, invert the value of VAR before doing the OR.
2328 Return NULL_TREE if we can't simplify this to a single expression. */
2330 static tree
2331 or_var_with_comparison (tree var, bool invert,
2332 enum tree_code code2, tree op2a, tree op2b)
2334 tree t;
2335 gimple stmt = SSA_NAME_DEF_STMT (var);
2337 /* We can only deal with variables whose definitions are assignments. */
2338 if (!is_gimple_assign (stmt))
2339 return NULL_TREE;
2341 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2342 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2343 Then we only have to consider the simpler non-inverted cases. */
2344 if (invert)
2345 t = and_var_with_comparison_1 (stmt,
2346 invert_tree_comparison (code2, false),
2347 op2a, op2b);
2348 else
2349 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2350 return canonicalize_bool (t, invert);
2353 /* Try to simplify the OR of the ssa variable defined by the assignment
2354 STMT with the comparison specified by (OP2A CODE2 OP2B).
2355 Return NULL_TREE if we can't simplify this to a single expression. */
2357 static tree
2358 or_var_with_comparison_1 (gimple stmt,
2359 enum tree_code code2, tree op2a, tree op2b)
2361 tree var = gimple_assign_lhs (stmt);
2362 tree true_test_var = NULL_TREE;
2363 tree false_test_var = NULL_TREE;
2364 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2366 /* Check for identities like (var OR (var != 0)) => true. */
2367 if (TREE_CODE (op2a) == SSA_NAME
2368 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2370 if ((code2 == NE_EXPR && integer_zerop (op2b))
2371 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2373 true_test_var = op2a;
2374 if (var == true_test_var)
2375 return var;
2377 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2378 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2380 false_test_var = op2a;
2381 if (var == false_test_var)
2382 return boolean_true_node;
2386 /* If the definition is a comparison, recurse on it. */
2387 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2389 tree t = or_comparisons_1 (innercode,
2390 gimple_assign_rhs1 (stmt),
2391 gimple_assign_rhs2 (stmt),
2392 code2,
2393 op2a,
2394 op2b);
2395 if (t)
2396 return t;
2399 /* If the definition is an AND or OR expression, we may be able to
2400 simplify by reassociating. */
2401 if (innercode == TRUTH_AND_EXPR
2402 || innercode == TRUTH_OR_EXPR
2403 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2404 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
2406 tree inner1 = gimple_assign_rhs1 (stmt);
2407 tree inner2 = gimple_assign_rhs2 (stmt);
2408 gimple s;
2409 tree t;
2410 tree partial = NULL_TREE;
2411 bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
2413 /* Check for boolean identities that don't require recursive examination
2414 of inner1/inner2:
2415 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2416 inner1 OR (inner1 AND inner2) => inner1
2417 !inner1 OR (inner1 OR inner2) => true
2418 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2 */
2420 if (inner1 == true_test_var)
2421 return (is_or ? var : inner1);
2422 else if (inner2 == true_test_var)
2423 return (is_or ? var : inner2);
2424 else if (inner1 == false_test_var)
2425 return (is_or
2426 ? boolean_true_node
2427 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2428 else if (inner2 == false_test_var)
2429 return (is_or
2430 ? boolean_true_node
2431 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2433 /* Next, redistribute/reassociate the OR across the inner tests.
2434 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2435 if (TREE_CODE (inner1) == SSA_NAME
2436 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2437 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2438 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2439 gimple_assign_rhs1 (s),
2440 gimple_assign_rhs2 (s),
2441 code2, op2a, op2b)))
2443 /* Handle the OR case, where we are reassociating:
2444 (inner1 OR inner2) OR (op2a code2 op2b)
2445 => (t OR inner2)
2446 If the partial result t is a constant, we win. Otherwise
2447 continue on to try reassociating with the other inner test. */
2448 if (is_or)
2450 if (integer_onep (t))
2451 return boolean_true_node;
2452 else if (integer_zerop (t))
2453 return inner2;
2456 /* Handle the AND case, where we are redistributing:
2457 (inner1 AND inner2) OR (op2a code2 op2b)
2458 => (t AND (inner2 OR (op2a code op2b))) */
2459 else if (integer_zerop (t))
2460 return boolean_false_node;
2462 /* Save partial result for later. */
2463 partial = t;
2466 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2467 if (TREE_CODE (inner2) == SSA_NAME
2468 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2469 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2470 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2471 gimple_assign_rhs1 (s),
2472 gimple_assign_rhs2 (s),
2473 code2, op2a, op2b)))
2475 /* Handle the OR case, where we are reassociating:
2476 (inner1 OR inner2) OR (op2a code2 op2b)
2477 => (inner1 OR t)
2478 => (t OR partial) */
2479 if (is_or)
2481 if (integer_zerop (t))
2482 return inner1;
2483 else if (integer_onep (t))
2484 return boolean_true_node;
2485 /* If both are the same, we can apply the identity
2486 (x OR x) == x. */
2487 else if (partial && same_bool_result_p (t, partial))
2488 return t;
2491 /* Handle the AND case, where we are redistributing:
2492 (inner1 AND inner2) OR (op2a code2 op2b)
2493 => (t AND (inner1 OR (op2a code2 op2b)))
2494 => (t AND partial) */
2495 else
2497 if (integer_zerop (t))
2498 return boolean_false_node;
2499 else if (partial)
2501 /* We already got a simplification for the other
2502 operand to the redistributed AND expression. The
2503 interesting case is when at least one is true.
2504 Or, if both are the same, we can apply the identity
2505 (x AND x) == x. */
2506 if (integer_onep (partial))
2507 return t;
2508 else if (integer_onep (t))
2509 return partial;
2510 else if (same_bool_result_p (t, partial))
2511 return t;
2516 return NULL_TREE;
2519 /* Try to simplify the OR of two comparisons defined by
2520 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2521 If this can be done without constructing an intermediate value,
2522 return the resulting tree; otherwise NULL_TREE is returned.
2523 This function is deliberately asymmetric as it recurses on SSA_DEFs
2524 in the first comparison but not the second. */
2526 static tree
2527 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2528 enum tree_code code2, tree op2a, tree op2b)
2530 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2531 if (operand_equal_p (op1a, op2a, 0)
2532 && operand_equal_p (op1b, op2b, 0))
2534 tree t = combine_comparisons (UNKNOWN_LOCATION,
2535 TRUTH_ORIF_EXPR, code1, code2,
2536 boolean_type_node, op1a, op1b);
2537 if (t)
2538 return t;
2541 /* Likewise the swapped case of the above. */
2542 if (operand_equal_p (op1a, op2b, 0)
2543 && operand_equal_p (op1b, op2a, 0))
2545 tree t = combine_comparisons (UNKNOWN_LOCATION,
2546 TRUTH_ORIF_EXPR, code1,
2547 swap_tree_comparison (code2),
2548 boolean_type_node, op1a, op1b);
2549 if (t)
2550 return t;
2553 /* If both comparisons are of the same value against constants, we might
2554 be able to merge them. */
2555 if (operand_equal_p (op1a, op2a, 0)
2556 && TREE_CODE (op1b) == INTEGER_CST
2557 && TREE_CODE (op2b) == INTEGER_CST)
2559 int cmp = tree_int_cst_compare (op1b, op2b);
2561 /* If we have (op1a != op1b), we should either be able to
2562 return that or TRUE, depending on whether the constant op1b
2563 also satisfies the other comparison against op2b. */
2564 if (code1 == NE_EXPR)
2566 bool done = true;
2567 bool val;
2568 switch (code2)
2570 case EQ_EXPR: val = (cmp == 0); break;
2571 case NE_EXPR: val = (cmp != 0); break;
2572 case LT_EXPR: val = (cmp < 0); break;
2573 case GT_EXPR: val = (cmp > 0); break;
2574 case LE_EXPR: val = (cmp <= 0); break;
2575 case GE_EXPR: val = (cmp >= 0); break;
2576 default: done = false;
2578 if (done)
2580 if (val)
2581 return boolean_true_node;
2582 else
2583 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2586 /* Likewise if the second comparison is a != comparison. */
2587 else if (code2 == NE_EXPR)
2589 bool done = true;
2590 bool val;
2591 switch (code1)
2593 case EQ_EXPR: val = (cmp == 0); break;
2594 case NE_EXPR: val = (cmp != 0); break;
2595 case LT_EXPR: val = (cmp > 0); break;
2596 case GT_EXPR: val = (cmp < 0); break;
2597 case LE_EXPR: val = (cmp >= 0); break;
2598 case GE_EXPR: val = (cmp <= 0); break;
2599 default: done = false;
2601 if (done)
2603 if (val)
2604 return boolean_true_node;
2605 else
2606 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2610 /* See if an equality test is redundant with the other comparison. */
2611 else if (code1 == EQ_EXPR)
2613 bool val;
2614 switch (code2)
2616 case EQ_EXPR: val = (cmp == 0); break;
2617 case NE_EXPR: val = (cmp != 0); break;
2618 case LT_EXPR: val = (cmp < 0); break;
2619 case GT_EXPR: val = (cmp > 0); break;
2620 case LE_EXPR: val = (cmp <= 0); break;
2621 case GE_EXPR: val = (cmp >= 0); break;
2622 default:
2623 val = false;
2625 if (val)
2626 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2628 else if (code2 == EQ_EXPR)
2630 bool val;
2631 switch (code1)
2633 case EQ_EXPR: val = (cmp == 0); break;
2634 case NE_EXPR: val = (cmp != 0); break;
2635 case LT_EXPR: val = (cmp > 0); break;
2636 case GT_EXPR: val = (cmp < 0); break;
2637 case LE_EXPR: val = (cmp >= 0); break;
2638 case GE_EXPR: val = (cmp <= 0); break;
2639 default:
2640 val = false;
2642 if (val)
2643 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2646 /* Choose the less restrictive of two < or <= comparisons. */
2647 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2648 && (code2 == LT_EXPR || code2 == LE_EXPR))
2650 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2651 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2652 else
2653 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2656 /* Likewise choose the less restrictive of two > or >= comparisons. */
2657 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2658 && (code2 == GT_EXPR || code2 == GE_EXPR))
2660 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2661 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2662 else
2663 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2666 /* Check for singleton ranges. */
2667 else if (cmp == 0
2668 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2669 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2670 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2672 /* Check for less/greater pairs that don't restrict the range at all. */
2673 else if (cmp >= 0
2674 && (code1 == LT_EXPR || code1 == LE_EXPR)
2675 && (code2 == GT_EXPR || code2 == GE_EXPR))
2676 return boolean_true_node;
2677 else if (cmp <= 0
2678 && (code1 == GT_EXPR || code1 == GE_EXPR)
2679 && (code2 == LT_EXPR || code2 == LE_EXPR))
2680 return boolean_true_node;
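/* Illustrative outcomes of the OR range checks above (hypothetical):
     (x < 7) OR (x > 7)   =>  (x != 7)   only the singleton is excluded
     (x < 5) OR (x >= 3)  =>  true       the two tests cover every value  */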
2683 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2684 NAME's definition is a truth value. See if there are any simplifications
2685 that can be done against the NAME's definition. */
2686 if (TREE_CODE (op1a) == SSA_NAME
2687 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2688 && (integer_zerop (op1b) || integer_onep (op1b)))
2690 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2691 || (code1 == NE_EXPR && integer_onep (op1b)));
2692 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2693 switch (gimple_code (stmt))
2695 case GIMPLE_ASSIGN:
2696 /* Try to simplify by copy-propagating the definition. */
2697 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2699 case GIMPLE_PHI:
2700 /* If every argument to the PHI produces the same result when
2701 ORed with the second comparison, we win.
2702 Do not do this unless the type is bool since we need a bool
2703 result here anyway. */
2704 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2706 tree result = NULL_TREE;
2707 unsigned i;
2708 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2710 tree arg = gimple_phi_arg_def (stmt, i);
2712 /* If this PHI has itself as an argument, ignore it.
2713 If all the other args produce the same result,
2714 we're still OK. */
2715 if (arg == gimple_phi_result (stmt))
2716 continue;
2717 else if (TREE_CODE (arg) == INTEGER_CST)
2719 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2721 if (!result)
2722 result = boolean_true_node;
2723 else if (!integer_onep (result))
2724 return NULL_TREE;
2726 else if (!result)
2727 result = fold_build2 (code2, boolean_type_node,
2728 op2a, op2b);
2729 else if (!same_bool_comparison_p (result,
2730 code2, op2a, op2b))
2731 return NULL_TREE;
2733 else if (TREE_CODE (arg) == SSA_NAME
2734 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2736 tree temp;
2737 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2738 /* In simple cases we can look through PHI nodes,
2739 but we have to be careful with loops.
2740 See PR49073. */
2741 if (! dom_info_available_p (CDI_DOMINATORS)
2742 || gimple_bb (def_stmt) == gimple_bb (stmt)
2743 || dominated_by_p (CDI_DOMINATORS,
2744 gimple_bb (def_stmt),
2745 gimple_bb (stmt)))
2746 return NULL_TREE;
2747 temp = or_var_with_comparison (arg, invert, code2,
2748 op2a, op2b);
2749 if (!temp)
2750 return NULL_TREE;
2751 else if (!result)
2752 result = temp;
2753 else if (!same_bool_result_p (result, temp))
2754 return NULL_TREE;
2756 else
2757 return NULL_TREE;
2759 return result;
2762 default:
2763 break;
2766 return NULL_TREE;
2769 /* Try to simplify the OR of two comparisons, specified by
2770 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2771 If this can be simplified to a single expression (without
2772 introducing new SSA variables to hold intermediate values),
2773 return the resulting tree. Otherwise return NULL_TREE.
2774 If the result expression is non-null, it has boolean type. */
2776 tree
2777 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2778 enum tree_code code2, tree op2a, tree op2b)
2780 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2781 if (t)
2782 return t;
2783 else
2784 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2788 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2790 Either NULL_TREE, a simplified but non-constant tree, or a constant
2791 is returned.
2793 ??? This should go into a gimple-fold-inline.h file to be eventually
2794 privatized with the single valueize function used in the various TUs
2795 to avoid the indirect function call overhead. */
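/* A minimal usage sketch (hypothetical, not part of this file): a caller
   that does not want to follow SSA edges can pass an identity valueizer:

     static tree
     identity_valueize (tree op)
     {
       return op;
     }

     tree cst = gimple_fold_stmt_to_constant_1 (stmt, identity_valueize);

   CCP instead passes a callback that returns its lattice value for each
   SSA name, so constants discovered so far are substituted on the fly.  */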
2797 tree
2798 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2800 location_t loc = gimple_location (stmt);
2801 switch (gimple_code (stmt))
2803 case GIMPLE_ASSIGN:
2805 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2807 switch (get_gimple_rhs_class (subcode))
2809 case GIMPLE_SINGLE_RHS:
2811 tree rhs = gimple_assign_rhs1 (stmt);
2812 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2814 if (TREE_CODE (rhs) == SSA_NAME)
2816 /* If the RHS is an SSA_NAME, return its known constant value,
2817 if any. */
2818 return (*valueize) (rhs);
2820 /* Handle propagating invariant addresses into address
2821 operations. */
2822 else if (TREE_CODE (rhs) == ADDR_EXPR
2823 && !is_gimple_min_invariant (rhs))
2825 HOST_WIDE_INT offset;
2826 tree base;
2827 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2828 &offset,
2829 valueize);
2830 if (base
2831 && (CONSTANT_CLASS_P (base)
2832 || decl_address_invariant_p (base)))
2833 return build_invariant_address (TREE_TYPE (rhs),
2834 base, offset);
2836 else if (TREE_CODE (rhs) == CONSTRUCTOR
2837 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2838 && (CONSTRUCTOR_NELTS (rhs)
2839 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2841 unsigned i;
2842 tree val, list;
2844 list = NULL_TREE;
2845 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2847 val = (*valueize) (val);
2848 if (TREE_CODE (val) == INTEGER_CST
2849 || TREE_CODE (val) == REAL_CST
2850 || TREE_CODE (val) == FIXED_CST)
2851 list = tree_cons (NULL_TREE, val, list);
2852 else
2853 return NULL_TREE;
2856 return build_vector (TREE_TYPE (rhs), nreverse (list));
2859 if (kind == tcc_reference)
2861 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2862 || TREE_CODE (rhs) == REALPART_EXPR
2863 || TREE_CODE (rhs) == IMAGPART_EXPR)
2864 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2866 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2867 return fold_unary_loc (EXPR_LOCATION (rhs),
2868 TREE_CODE (rhs),
2869 TREE_TYPE (rhs), val);
2871 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2872 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2874 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2875 return fold_ternary_loc (EXPR_LOCATION (rhs),
2876 TREE_CODE (rhs),
2877 TREE_TYPE (rhs), val,
2878 TREE_OPERAND (rhs, 1),
2879 TREE_OPERAND (rhs, 2));
2881 else if (TREE_CODE (rhs) == MEM_REF
2882 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2884 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2885 if (TREE_CODE (val) == ADDR_EXPR
2886 && is_gimple_min_invariant (val))
2888 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2889 unshare_expr (val),
2890 TREE_OPERAND (rhs, 1));
2891 if (tem)
2892 rhs = tem;
2895 return fold_const_aggregate_ref_1 (rhs, valueize);
2897 else if (kind == tcc_declaration)
2898 return get_symbol_constant_value (rhs);
2899 return rhs;
2902 case GIMPLE_UNARY_RHS:
2904 /* Handle unary operators that can appear in GIMPLE form.
2905 Note that we know the single operand must be a constant,
2906 so this should almost always return a simplified RHS. */
2907 tree lhs = gimple_assign_lhs (stmt);
2908 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2910 /* Conversions are useless for CCP purposes if they are
2911 value-preserving. Thus the restrictions that
2912 useless_type_conversion_p places for pointer type conversions
2913 do not apply here. Substitution later will only substitute to
2914 allowed places. */
2915 if (CONVERT_EXPR_CODE_P (subcode)
2916 && POINTER_TYPE_P (TREE_TYPE (lhs))
2917 && POINTER_TYPE_P (TREE_TYPE (op0)))
2919 tree tem;
2920 /* Try to re-construct array references on-the-fly. */
2921 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2922 TREE_TYPE (op0))
2923 && ((tem = maybe_fold_offset_to_address
2924 (loc,
2925 op0, integer_zero_node, TREE_TYPE (lhs)))
2926 != NULL_TREE))
2927 return tem;
2928 return op0;
2931 return
2932 fold_unary_ignore_overflow_loc (loc, subcode,
2933 gimple_expr_type (stmt), op0);
2936 case GIMPLE_BINARY_RHS:
2938 /* Handle binary operators that can appear in GIMPLE form. */
2939 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2940 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2942 /* Translate &x + CST into an invariant form suitable for
2943 further propagation. */
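/* E.g. (illustrative): for the statement
     p_1 = &a + 4;
   this builds an ADDR_EXPR of a MEM_REF of A at byte offset 4, which is a
   gimple invariant that later substitution can propagate.  */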
2944 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2945 && TREE_CODE (op0) == ADDR_EXPR
2946 && TREE_CODE (op1) == INTEGER_CST)
2948 tree off = fold_convert (ptr_type_node, op1);
2949 return build_fold_addr_expr
2950 (fold_build2 (MEM_REF,
2951 TREE_TYPE (TREE_TYPE (op0)),
2952 unshare_expr (op0), off));
2955 return fold_binary_loc (loc, subcode,
2956 gimple_expr_type (stmt), op0, op1);
2959 case GIMPLE_TERNARY_RHS:
2961 /* Handle ternary operators that can appear in GIMPLE form. */
2962 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2963 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2964 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2966 return fold_ternary_loc (loc, subcode,
2967 gimple_expr_type (stmt), op0, op1, op2);
2970 default:
2971 gcc_unreachable ();
2975 case GIMPLE_CALL:
2977 tree fn;
2979 if (gimple_call_internal_p (stmt))
2980 /* No folding yet for these functions. */
2981 return NULL_TREE;
2983 fn = (*valueize) (gimple_call_fn (stmt));
2984 if (TREE_CODE (fn) == ADDR_EXPR
2985 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2986 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2988 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2989 tree call, retval;
2990 unsigned i;
2991 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2992 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2993 call = build_call_array_loc (loc,
2994 gimple_call_return_type (stmt),
2995 fn, gimple_call_num_args (stmt), args);
2996 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2997 if (retval)
2998 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2999 STRIP_NOPS (retval);
3000 return retval;
3002 return NULL_TREE;
3005 default:
3006 return NULL_TREE;
3010 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
3011 Returns NULL_TREE if folding to a constant is not possible, otherwise
3012 returns a constant according to is_gimple_min_invariant. */
3014 tree
3015 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
3017 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
3018 if (res && is_gimple_min_invariant (res))
3019 return res;
3020 return NULL_TREE;
3024 /* The following set of functions is supposed to fold references using
3025 their constant initializers. */
3027 static tree fold_ctor_reference (tree type, tree ctor,
3028 unsigned HOST_WIDE_INT offset,
3029 unsigned HOST_WIDE_INT size);
3031 /* See if we can find a constructor defining the value of BASE.
3032 When we know the constructor at a constant offset (such as
3033 BASE being array[40] where we know the constructor of array),
3034 BIT_OFFSET is adjusted accordingly.
3036 As a special case, return error_mark_node when the constructor
3037 is not explicitly available but is known to be zero,
3038 such as 'static const int a;'. */
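/* Illustrative cases (hypothetical): given
     static const int arr[4] = { 1, 2, 3, 4 };
   a BASE of MEM[&arr + 8] yields the CONSTRUCTOR of arr with *BIT_OFFSET
   increased by 8 * BITS_PER_UNIT, while
     static const int a;
   has no explicit DECL_INITIAL and yields error_mark_node.  */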
3039 static tree
3040 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
3041 tree (*valueize)(tree))
3043 HOST_WIDE_INT bit_offset2, size, max_size;
3044 if (TREE_CODE (base) == MEM_REF)
3046 if (!integer_zerop (TREE_OPERAND (base, 1)))
3048 if (!host_integerp (TREE_OPERAND (base, 1), 0))
3049 return NULL_TREE;
3050 *bit_offset += (mem_ref_offset (base).low
3051 * BITS_PER_UNIT);
3054 if (valueize
3055 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
3056 base = valueize (TREE_OPERAND (base, 0));
3057 if (!base || TREE_CODE (base) != ADDR_EXPR)
3058 return NULL_TREE;
3059 base = TREE_OPERAND (base, 0);
3062 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
3063 DECL_INITIAL. If BASE is a nested reference into another
3064 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
3065 the inner reference. */
3066 switch (TREE_CODE (base))
3068 case VAR_DECL:
3069 if (!const_value_known_p (base))
3070 return NULL_TREE;
3072 /* Fallthru. */
3073 case CONST_DECL:
3074 if (!DECL_INITIAL (base)
3075 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
3076 return error_mark_node;
3077 return DECL_INITIAL (base);
3079 case ARRAY_REF:
3080 case COMPONENT_REF:
3081 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
3082 if (max_size == -1 || size != max_size)
3083 return NULL_TREE;
3084 *bit_offset += bit_offset2;
3085 return get_base_constructor (base, bit_offset, valueize);
3087 case STRING_CST:
3088 case CONSTRUCTOR:
3089 return base;
3091 default:
3092 return NULL_TREE;
3096 /* CTOR is a STRING_CST. Fold a reference of type TYPE and size SIZE
3097 to the memory at bit OFFSET.
3099 We only do the simple job of folding byte accesses. */
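/* E.g. (illustrative): with CTOR = "hello" and an 8-bit character type, a
   one-byte read at byte offset 1 folds to the constant 'e'; a read past
   the string, as in the a[10] case below, folds to 0.  */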
3101 static tree
3102 fold_string_cst_ctor_reference (tree type, tree ctor,
3103 unsigned HOST_WIDE_INT offset,
3104 unsigned HOST_WIDE_INT size)
3106 if (INTEGRAL_TYPE_P (type)
3107 && (TYPE_MODE (type)
3108 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
3109 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
3110 == MODE_INT)
3111 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
3112 && size == BITS_PER_UNIT
3113 && !(offset % BITS_PER_UNIT))
3115 offset /= BITS_PER_UNIT;
3116 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
3117 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
3118 [offset]));
3119 /* Folding
3120 const char a[20]="hello";
3121 return a[10];
3123 might lead to an offset greater than the string length. In this case we
3124 know the value is either initialized to 0 or out of bounds. Return 0
3125 in both cases. */
3126 return build_zero_cst (type);
3128 return NULL_TREE;
3131 /* CTOR is a CONSTRUCTOR of an array type. Fold a reference of type TYPE
3132 and size SIZE to the memory at bit OFFSET. */
3134 static tree
3135 fold_array_ctor_reference (tree type, tree ctor,
3136 unsigned HOST_WIDE_INT offset,
3137 unsigned HOST_WIDE_INT size)
3139 unsigned HOST_WIDE_INT cnt;
3140 tree cfield, cval;
3141 double_int low_bound, elt_size;
3142 double_int index, max_index;
3143 double_int access_index;
3144 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
3145 HOST_WIDE_INT inner_offset;
3147 /* Compute low bound and elt size. */
3148 if (domain_type && TYPE_MIN_VALUE (domain_type))
3150 /* Static constructors for variably sized objects make no sense. */
3151 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
3152 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
3154 else
3155 low_bound = double_int_zero;
3156 /* Static constructors for variably sized objects make no sense. */
3157 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
3158 == INTEGER_CST);
3159 elt_size =
3160 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
3163 /* We can handle only constant-sized accesses that are known to not
3164 be larger than the size of the array element. */
3165 if (!TYPE_SIZE_UNIT (type)
3166 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
3167 || double_int_cmp (elt_size,
3168 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
3169 return NULL_TREE;
3171 /* Compute the array index we look for. */
3172 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
3173 elt_size, TRUNC_DIV_EXPR);
3174 access_index = double_int_add (access_index, low_bound);
3176 /* And offset within the access. */
3177 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
3179 /* See if the array field is large enough to span the whole access. We do not
3180 care to fold accesses spanning multiple array indexes. */
3181 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
3182 return NULL_TREE;
3184 index = double_int_sub (low_bound, double_int_one);
3185 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
3187 /* An array constructor might explicitly set an index, specify a range,
3188 or leave the index NULL, meaning that it is the next index after the
3189 previous one. */
3190 if (cfield)
3192 if (TREE_CODE (cfield) == INTEGER_CST)
3193 max_index = index = tree_to_double_int (cfield);
3194 else
3196 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
3197 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
3198 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
3201 else
3202 max_index = index = double_int_add (index, double_int_one);
3204 /* Do we have a match? */
3205 if (double_int_cmp (access_index, index, 1) >= 0
3206 && double_int_cmp (access_index, max_index, 1) <= 0)
3207 return fold_ctor_reference (type, cval, inner_offset, size);
3209 /* When memory is not explicitly mentioned in the constructor,
3210 it is 0 (or out of range). */
3211 return build_zero_cst (type);
3214 /* CTOR is a CONSTRUCTOR of an aggregate or vector.
3215 Fold a reference of type TYPE and size SIZE to the memory at bit OFFSET. */
3217 static tree
3218 fold_nonarray_ctor_reference (tree type, tree ctor,
3219 unsigned HOST_WIDE_INT offset,
3220 unsigned HOST_WIDE_INT size)
3222 unsigned HOST_WIDE_INT cnt;
3223 tree cfield, cval;
3225 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
3226 cval)
3228 tree byte_offset = DECL_FIELD_OFFSET (cfield);
3229 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
3230 tree field_size = DECL_SIZE (cfield);
3231 double_int bitoffset;
3232 double_int byte_offset_cst = tree_to_double_int (byte_offset);
3233 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
3234 double_int bitoffset_end, access_end;
3236 /* Variably sized objects in static constructors make no sense,
3237 but field_size can be NULL for flexible array members. */
3238 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
3239 && TREE_CODE (byte_offset) == INTEGER_CST
3240 && (field_size != NULL_TREE
3241 ? TREE_CODE (field_size) == INTEGER_CST
3242 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
3244 /* Compute bit offset of the field. */
3245 bitoffset = double_int_add (tree_to_double_int (field_offset),
3246 double_int_mul (byte_offset_cst,
3247 bits_per_unit_cst));
3248 /* Compute bit offset where the field ends. */
3249 if (field_size != NULL_TREE)
3250 bitoffset_end = double_int_add (bitoffset,
3251 tree_to_double_int (field_size));
3252 else
3253 bitoffset_end = double_int_zero;
3255 access_end = double_int_add (uhwi_to_double_int (offset),
3256 uhwi_to_double_int (size));
3258 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
3259 [BITOFFSET, BITOFFSET_END)? */
3260 if (double_int_cmp (access_end, bitoffset, 0) > 0
3261 && (field_size == NULL_TREE
3262 || double_int_cmp (uhwi_to_double_int (offset),
3263 bitoffset_end, 0) < 0))
3265 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
3266 bitoffset);
3267 /* We do have overlap. Now see if field is large enough to
3268 cover the access. Give up for accesses spanning multiple
3269 fields. */
3270 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
3271 return NULL_TREE;
3272 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
3273 return NULL_TREE;
3274 return fold_ctor_reference (type, cval,
3275 double_int_to_uhwi (inner_offset), size);
3278 /* When memory is not explicitly mentioned in the constructor, it is 0. */
3279 return build_zero_cst (type);
3282 /* CTOR is the value initializing memory; fold a reference of type TYPE and size SIZE
3283 to the memory at bit OFFSET. */
3285 static tree
3286 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
3287 unsigned HOST_WIDE_INT size)
3289 tree ret;
3291 /* We found the field with an exact match. */
3292 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
3293 && !offset)
3294 return canonicalize_constructor_val (ctor);
3296 /* We are at the end of the walk; see if we can view-convert the
3297 result. */
3298 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
3299 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3300 && operand_equal_p (TYPE_SIZE (type),
3301 TYPE_SIZE (TREE_TYPE (ctor)), 0))
3303 ret = canonicalize_constructor_val (ctor);
3304 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
3305 if (ret)
3306 STRIP_NOPS (ret);
3307 return ret;
3309 if (TREE_CODE (ctor) == STRING_CST)
3310 return fold_string_cst_ctor_reference (type, ctor, offset, size);
3311 if (TREE_CODE (ctor) == CONSTRUCTOR)
3314 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
3315 return fold_array_ctor_reference (type, ctor, offset, size);
3316 else
3317 return fold_nonarray_ctor_reference (type, ctor, offset, size);
3320 return NULL_TREE;
3323 /* Return the tree representing the element referenced by T if T is an
3324 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
3325 names using VALUEIZE. Return NULL_TREE otherwise. */
3327 tree
3328 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
3330 tree ctor, idx, base;
3331 HOST_WIDE_INT offset, size, max_size;
3332 tree tem;
3334 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3335 return get_symbol_constant_value (t);
3337 tem = fold_read_from_constant_string (t);
3338 if (tem)
3339 return tem;
3341 switch (TREE_CODE (t))
3343 case ARRAY_REF:
3344 case ARRAY_RANGE_REF:
3345 /* Constant indexes are handled well by get_base_constructor.
3346 Only special case variable offsets.
3347 FIXME: This code can't handle nested references with variable indexes
3348 (they will be handled only by iteration of ccp). Perhaps we can bring
3349 get_ref_base_and_extent here and make it use a valueize callback. */
3350 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3351 && valueize
3352 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3353 && host_integerp (idx, 0))
3355 tree low_bound, unit_size;
3357 /* If the resulting bit-offset is constant, track it. */
3358 if ((low_bound = array_ref_low_bound (t),
3359 host_integerp (low_bound, 0))
3360 && (unit_size = array_ref_element_size (t),
3361 host_integerp (unit_size, 1)))
3363 offset = TREE_INT_CST_LOW (idx);
3364 offset -= TREE_INT_CST_LOW (low_bound);
3365 offset *= TREE_INT_CST_LOW (unit_size);
3366 offset *= BITS_PER_UNIT;
3368 base = TREE_OPERAND (t, 0);
3369 ctor = get_base_constructor (base, &offset, valueize);
3370 /* Empty constructor. Always fold to 0. */
3371 if (ctor == error_mark_node)
3372 return build_zero_cst (TREE_TYPE (t));
3373 /* Out-of-bounds array access. The value is undefined,
3374 but don't fold. */
3375 if (offset < 0)
3376 return NULL_TREE;
3377 /* We cannot determine the ctor. */
3378 if (!ctor)
3379 return NULL_TREE;
3380 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3381 TREE_INT_CST_LOW (unit_size)
3382 * BITS_PER_UNIT);
3385 /* Fallthru. */
3387 case COMPONENT_REF:
3388 case BIT_FIELD_REF:
3389 case TARGET_MEM_REF:
3390 case MEM_REF:
3391 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3392 ctor = get_base_constructor (base, &offset, valueize);
3394 /* Empty constructor. Always fold to 0. */
3395 if (ctor == error_mark_node)
3396 return build_zero_cst (TREE_TYPE (t));
3397 /* We do not know the precise address. */
3398 if (max_size == -1 || max_size != size)
3399 return NULL_TREE;
3400 /* We cannot determine the ctor. */
3401 if (!ctor)
3402 return NULL_TREE;
3404 /* Out-of-bounds array access. The value is undefined, but don't fold. */
3405 if (offset < 0)
3406 return NULL_TREE;
3408 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
3410 case REALPART_EXPR:
3411 case IMAGPART_EXPR:
3413 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3414 if (c && TREE_CODE (c) == COMPLEX_CST)
3415 return fold_build1_loc (EXPR_LOCATION (t),
3416 TREE_CODE (t), TREE_TYPE (t), c);
3417 break;
3420 default:
3421 break;
3424 return NULL_TREE;
3427 tree
3428 fold_const_aggregate_ref (tree t)
3430 return fold_const_aggregate_ref_1 (t, NULL);
3433 /* Return true iff VAL is a gimple expression that is known to be
3434 non-negative. Restricted to floating-point inputs. */
3436 bool
3437 gimple_val_nonnegative_real_p (tree val)
3439 gimple def_stmt;
3441 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3443 /* Use existing logic for non-gimple trees. */
3444 if (tree_expr_nonnegative_p (val))
3445 return true;
3447 if (TREE_CODE (val) != SSA_NAME)
3448 return false;
3450 /* Currently we look only at the immediately defining statement
3451 to make this determination, since recursion on defining
3452 statements of operands can lead to quadratic behavior in the
3453 worst case. This is expected to catch almost all occurrences
3454 in practice. It would be possible to implement limited-depth
3455 recursion if important cases are lost. Alternatively, passes
3456 that need this information (such as the pow/powi lowering code
3457 in the cse_sincos pass) could be revised to provide it through
3458 dataflow propagation. */
3460 def_stmt = SSA_NAME_DEF_STMT (val);
3462 if (is_gimple_assign (def_stmt))
3464 tree op0, op1;
3466 /* See fold-const.c:tree_expr_nonnegative_p for additional
3467 cases that could be handled with recursion. */
3469 switch (gimple_assign_rhs_code (def_stmt))
3471 case ABS_EXPR:
3472 /* Always true for floating-point operands. */
3473 return true;
3475 case MULT_EXPR:
3476 /* True if the two operands are identical (since we are
3477 restricted to floating-point inputs). */
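/* E.g. (illustrative):
     y_1 = x_2 * x_2;
   is known to be nonnegative for any floating-point x_2, setting NaN
   inputs aside.  */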
3478 op0 = gimple_assign_rhs1 (def_stmt);
3479 op1 = gimple_assign_rhs2 (def_stmt);
3481 if (op0 == op1
3482 || operand_equal_p (op0, op1, 0))
3483 return true;
3485 default:
3486 return false;
3489 else if (is_gimple_call (def_stmt))
3491 tree fndecl = gimple_call_fndecl (def_stmt);
3492 if (fndecl
3493 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3495 tree arg1;
3497 switch (DECL_FUNCTION_CODE (fndecl))
3499 CASE_FLT_FN (BUILT_IN_ACOS):
3500 CASE_FLT_FN (BUILT_IN_ACOSH):
3501 CASE_FLT_FN (BUILT_IN_CABS):
3502 CASE_FLT_FN (BUILT_IN_COSH):
3503 CASE_FLT_FN (BUILT_IN_ERFC):
3504 CASE_FLT_FN (BUILT_IN_EXP):
3505 CASE_FLT_FN (BUILT_IN_EXP10):
3506 CASE_FLT_FN (BUILT_IN_EXP2):
3507 CASE_FLT_FN (BUILT_IN_FABS):
3508 CASE_FLT_FN (BUILT_IN_FDIM):
3509 CASE_FLT_FN (BUILT_IN_HYPOT):
3510 CASE_FLT_FN (BUILT_IN_POW10):
3511 return true;
3513 CASE_FLT_FN (BUILT_IN_SQRT):
3514 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3515 negative inputs. */
3516 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3517 return true;
3519 break;
3521 CASE_FLT_FN (BUILT_IN_POWI):
3522 /* True if the second argument is an even integer. */
3523 arg1 = gimple_call_arg (def_stmt, 1);
3525 if (TREE_CODE (arg1) == INTEGER_CST
3526 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3527 return true;
3529 break;
3531 CASE_FLT_FN (BUILT_IN_POW):
3532 /* True if the second argument is an even integer-valued
3533 real. */
3534 arg1 = gimple_call_arg (def_stmt, 1);
3536 if (TREE_CODE (arg1) == REAL_CST)
3538 REAL_VALUE_TYPE c;
3539 HOST_WIDE_INT n;
3541 c = TREE_REAL_CST (arg1);
3542 n = real_to_integer (&c);
3544 if ((n & 1) == 0)
3546 REAL_VALUE_TYPE cint;
3547 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3548 if (real_identical (&c, &cint))
3549 return true;
3553 break;
3555 default:
3556 return false;
3561 return false;