gcc/gimple-fold.c
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010, 2011 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
32 #include "target.h"
33 #include "gimple-fold.h"
35 /* Return true when DECL can be referenced from current unit.
36 We can get declarations that are not possible to reference for
37 various reasons:
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
41 when they are keyed to other compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
44 set.
45 2) In WHOPR mode devirtualization might lead to a reference
46 to a method that was partitioned elsewhere.
47 In this case we have static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
49 declaring the body.
50 3) COMDAT functions referred to by external vtables that
51 we devirtualize only during the final compilation stage.
52 At this time we already decided that we will not output
53 the function body and thus we can't reference the symbol
54 directly. */
56 static bool
57 can_refer_decl_in_current_unit_p (tree decl)
59 struct varpool_node *vnode;
60 struct cgraph_node *node;
62 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
63 return true;
64 /* The external flag is set, so we are dealing with a C++ reference
65 to a static object from another file. */
66 if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
67 && TREE_CODE (decl) == VAR_DECL)
69 /* Just be sure it is not a bug in the frontend setting
70 flags incorrectly. Those variables should never
71 be finalized. */
72 gcc_checking_assert (!(vnode = varpool_get_node (decl))
73 || !vnode->finalized);
74 return false;
76 /* When the function is public, we can always introduce a new reference.
77 The exception is COMDAT functions, where introducing a direct
78 reference implies the need to include the function body in the current unit. */
79 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
80 return true;
81 /* We are not at the ltrans stage, so don't worry about WHOPR.
82 Also, while we are still gimplifying, all referred COMDAT functions will be
83 produced.
84 ??? As observed in PR20991, for already optimized-out COMDAT virtual functions
85 we may not necessarily have to give up, because the copy will be output elsewhere
86 when the corresponding vtable is output. */
87 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
88 return true;
89 /* If we already output the function body, we are safe. */
90 if (TREE_ASM_WRITTEN (decl))
91 return true;
92 if (TREE_CODE (decl) == FUNCTION_DECL)
94 node = cgraph_get_node (decl);
95 /* Check that we still have the function body and that we haven't yet taken
96 the decision to eliminate the offline copy of the function.
97 The second point is important when devirtualization happens during the final
98 compilation stage, when making a new reference no longer causes the callee
99 to be compiled. */
100 if (!node || !node->analyzed || node->global.inlined_to)
101 return false;
103 else if (TREE_CODE (decl) == VAR_DECL)
105 vnode = varpool_get_node (decl);
106 if (!vnode || !vnode->finalized)
107 return false;
109 return true;
112 /* CVAL is a value taken from the DECL_INITIAL of a variable. Try to transform
113 it into a form acceptable to is_gimple_min_invariant. */
115 tree
116 canonicalize_constructor_val (tree cval)
118 STRIP_NOPS (cval);
119 if (TREE_CODE (cval) == POINTER_PLUS_EXPR)
121 tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval),
122 TREE_OPERAND (cval, 0),
123 TREE_OPERAND (cval, 1),
124 TREE_TYPE (cval));
125 if (t)
126 cval = t;
128 if (TREE_CODE (cval) == ADDR_EXPR)
130 tree base = get_base_address (TREE_OPERAND (cval, 0));
132 if (base
133 && (TREE_CODE (base) == VAR_DECL
134 || TREE_CODE (base) == FUNCTION_DECL)
135 && !can_refer_decl_in_current_unit_p (base))
136 return NULL_TREE;
137 if (cfun && base && TREE_CODE (base) == VAR_DECL)
138 add_referenced_var (base);
139 /* Fixup types in global initializers. */
140 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
141 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
143 return cval;
146 /* If SYM is a constant variable with known value, return the value.
147 NULL_TREE is returned otherwise. */
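/* For instance, for a declaration like `static const int n = 42;' a use of N
   typically folds to 42, while a `const' qualified integral or float variable
   declared without an initializer that cannot be overridden at link or run
   time folds to zero (illustrative example). */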
149 tree
150 get_symbol_constant_value (tree sym)
152 if (const_value_known_p (sym))
154 tree val = DECL_INITIAL (sym);
155 if (val)
157 val = canonicalize_constructor_val (val);
158 if (val && is_gimple_min_invariant (val))
159 return val;
160 else
161 return NULL_TREE;
163 /* Variables declared 'const' without an initializer
164 have zero as the initializer if they may not be
165 overridden at link or run time. */
166 if (!val
167 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
168 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
169 return build_zero_cst (TREE_TYPE (sym));
172 return NULL_TREE;
176 /* Return true if we may propagate the address expression ADDR into the
177 dereference DEREF and cancel them. */
179 bool
180 may_propagate_address_into_dereference (tree addr, tree deref)
182 gcc_assert (TREE_CODE (deref) == MEM_REF
183 && TREE_CODE (addr) == ADDR_EXPR);
185 /* Don't propagate if ADDR's operand has incomplete type. */
186 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
187 return false;
189 /* If the address is invariant then we do not need to preserve restrict
190 qualifications. But we do need to preserve volatile qualifiers until
191 we can annotate the folded dereference itself properly. */
192 if (is_gimple_min_invariant (addr)
193 && (!TREE_THIS_VOLATILE (deref)
194 || TYPE_VOLATILE (TREE_TYPE (addr))))
195 return useless_type_conversion_p (TREE_TYPE (deref),
196 TREE_TYPE (TREE_OPERAND (addr, 0)));
198 /* Else both the address substitution and the folding must result in
199 a valid useless type conversion sequence. */
200 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
201 TREE_TYPE (addr))
202 && useless_type_conversion_p (TREE_TYPE (deref),
203 TREE_TYPE (TREE_OPERAND (addr, 0))));
207 /* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
208 BASE is an array type. OFFSET is a byte displacement.
210 LOC is the location of the original expression. */
212 static tree
213 maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
215 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
216 tree array_type, elt_type, elt_size;
217 tree domain_type;
219 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
220 measured in units of the size of the element type) from that ARRAY_REF.
221 We can't do anything if either is variable.
223 The case we handle here is *(&A[N]+O). */
224 if (TREE_CODE (base) == ARRAY_REF)
226 tree low_bound = array_ref_low_bound (base);
228 elt_offset = TREE_OPERAND (base, 1);
229 if (TREE_CODE (low_bound) != INTEGER_CST
230 || TREE_CODE (elt_offset) != INTEGER_CST)
231 return NULL_TREE;
233 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound);
234 base = TREE_OPERAND (base, 0);
237 /* Ignore stupid user tricks of indexing non-array variables. */
238 array_type = TREE_TYPE (base);
239 if (TREE_CODE (array_type) != ARRAY_TYPE)
240 return NULL_TREE;
241 elt_type = TREE_TYPE (array_type);
243 /* Use signed size type for intermediate computation on the index. */
244 idx_type = ssizetype;
246 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
247 element type (so we can use the alignment if it's not constant).
248 Otherwise, compute the offset as an index by using a division. If the
249 division isn't exact, then don't do anything. */
250 elt_size = TYPE_SIZE_UNIT (elt_type);
251 if (!elt_size)
252 return NULL;
253 if (integer_zerop (offset))
255 if (TREE_CODE (elt_size) != INTEGER_CST)
256 elt_size = size_int (TYPE_ALIGN (elt_type));
258 idx = build_int_cst (idx_type, 0);
260 else
262 unsigned HOST_WIDE_INT lquo, lrem;
263 HOST_WIDE_INT hquo, hrem;
264 double_int soffset;
266 /* The final array offset should be signed, so we need
267 to sign-extend the (possibly pointer) offset here
268 and use signed division. */
269 soffset = double_int_sext (tree_to_double_int (offset),
270 TYPE_PRECISION (TREE_TYPE (offset)));
271 if (TREE_CODE (elt_size) != INTEGER_CST
272 || div_and_round_double (TRUNC_DIV_EXPR, 0,
273 soffset.low, soffset.high,
274 TREE_INT_CST_LOW (elt_size),
275 TREE_INT_CST_HIGH (elt_size),
276 &lquo, &hquo, &lrem, &hrem)
277 || lrem || hrem)
278 return NULL_TREE;
280 idx = build_int_cst_wide (idx_type, lquo, hquo);
283 /* Assume the low bound is zero. If there is a domain type, get the
284 low bound, if any, convert the index into that type, and add the
285 low bound. */
286 min_idx = build_int_cst (idx_type, 0);
287 domain_type = TYPE_DOMAIN (array_type);
288 if (domain_type)
290 idx_type = domain_type;
291 if (TYPE_MIN_VALUE (idx_type))
292 min_idx = TYPE_MIN_VALUE (idx_type);
293 else
294 min_idx = fold_convert (idx_type, min_idx);
296 if (TREE_CODE (min_idx) != INTEGER_CST)
297 return NULL_TREE;
299 elt_offset = fold_convert (idx_type, elt_offset);
302 if (!integer_zerop (min_idx))
303 idx = int_const_binop (PLUS_EXPR, idx, min_idx);
304 if (!integer_zerop (elt_offset))
305 idx = int_const_binop (PLUS_EXPR, idx, elt_offset);
307 /* Make sure to possibly truncate late after offsetting. */
308 idx = fold_convert (idx_type, idx);
310 /* We don't want to construct an access past the array bounds. For example
311 char *(c[4]);
312 c[3][2];
313 should not be simplified into (*c)[14] or tree-vrp will
314 give false warnings.
315 This is only an issue for multi-dimensional arrays. */
316 if (TREE_CODE (elt_type) == ARRAY_TYPE
317 && domain_type)
319 if (TYPE_MAX_VALUE (domain_type)
320 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
321 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
322 return NULL_TREE;
323 else if (TYPE_MIN_VALUE (domain_type)
324 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
325 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
326 return NULL_TREE;
327 else if (compare_tree_int (idx, 0) < 0)
328 return NULL_TREE;
332 tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
333 SET_EXPR_LOCATION (t, loc);
334 return t;
339 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
340 LOC is the location of the original expression.
342 Before attempting the conversion strip off existing ADDR_EXPRs. */
344 tree
345 maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
346 tree orig_type)
348 tree ret;
350 STRIP_NOPS (base);
351 if (TREE_CODE (base) != ADDR_EXPR)
352 return NULL_TREE;
354 base = TREE_OPERAND (base, 0);
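  /* If the base object already has the desired type and the offset is zero,
     the dereference simply yields the base object itself.  */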
355 if (types_compatible_p (orig_type, TREE_TYPE (base))
356 && integer_zerop (offset))
357 return base;
359 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
360 if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
361 return ret;
362 return NULL_TREE;
365 /* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
366 LOC is the location of the original expression. */
368 tree
369 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
370 tree orig_type)
372 tree base, ret;
374 STRIP_NOPS (addr);
375 if (TREE_CODE (addr) != ADDR_EXPR)
376 return NULL_TREE;
377 base = TREE_OPERAND (addr, 0);
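  /* Try to express the offsetted object as an array element and take its
     address again; only accept the result if it converts trivially to
     ORIG_TYPE.  */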
378 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
379 if (ret)
381 ret = build_fold_addr_expr (ret);
382 if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
383 return NULL_TREE;
384 SET_EXPR_LOCATION (ret, loc);
387 return ret;
391 /* A quaint feature extant in our address arithmetic is that there
392 can be hidden type changes here. The type of the result need
393 not be the same as the type of the input pointer.
395 What we're after here is an expression of the form
396 (T *)(&array + const)
397 where array is OP0, const is OP1, RES_TYPE is T and
398 the cast doesn't actually exist, but is implicit in the
399 type of the POINTER_PLUS_EXPR. We'd like to turn this into
400 &array[x]
401 which may be able to propagate further. */
403 tree
404 maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
406 tree ptd_type;
407 tree t;
409 /* The first operand should be an ADDR_EXPR. */
410 if (TREE_CODE (op0) != ADDR_EXPR)
411 return NULL_TREE;
412 op0 = TREE_OPERAND (op0, 0);
414 /* It had better be a constant. */
415 if (TREE_CODE (op1) != INTEGER_CST)
417 /* Or op0 should now be A[0] and the non-constant offset defined
418 via a multiplication by the array element size. */
419 if (TREE_CODE (op0) == ARRAY_REF
420 /* As we will end up creating a variable-index array access
421 in the outermost array dimension, make sure there isn't
422 an inner array that the index could overflow into. */
423 && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
424 && integer_zerop (TREE_OPERAND (op0, 1))
425 && TREE_CODE (op1) == SSA_NAME)
427 gimple offset_def = SSA_NAME_DEF_STMT (op1);
428 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
429 if (!host_integerp (elsz, 1)
430 || !is_gimple_assign (offset_def))
431 return NULL_TREE;
433 /* Do not build array references of something that we can't
434 see the true number of array dimensions for. */
435 if (!DECL_P (TREE_OPERAND (op0, 0))
436 && !handled_component_p (TREE_OPERAND (op0, 0)))
437 return NULL_TREE;
439 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
440 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
441 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
442 return build_fold_addr_expr
443 (build4 (ARRAY_REF, TREE_TYPE (op0),
444 TREE_OPERAND (op0, 0),
445 gimple_assign_rhs1 (offset_def),
446 TREE_OPERAND (op0, 2),
447 TREE_OPERAND (op0, 3)));
448 else if (integer_onep (elsz)
449 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
450 return build_fold_addr_expr
451 (build4 (ARRAY_REF, TREE_TYPE (op0),
452 TREE_OPERAND (op0, 0),
453 op1,
454 TREE_OPERAND (op0, 2),
455 TREE_OPERAND (op0, 3)));
457 else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
458 /* Ditto. */
459 && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
460 && TREE_CODE (op1) == SSA_NAME)
462 gimple offset_def = SSA_NAME_DEF_STMT (op1);
463 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
464 if (!host_integerp (elsz, 1)
465 || !is_gimple_assign (offset_def))
466 return NULL_TREE;
468 /* Do not build array references of something that we can't
469 see the true number of array dimensions for. */
470 if (!DECL_P (op0)
471 && !handled_component_p (op0))
472 return NULL_TREE;
474 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
475 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
476 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
477 return build_fold_addr_expr
478 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
479 op0, gimple_assign_rhs1 (offset_def),
480 integer_zero_node, NULL_TREE));
481 else if (integer_onep (elsz)
482 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
483 return build_fold_addr_expr
484 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
485 op0, op1,
486 integer_zero_node, NULL_TREE));
489 return NULL_TREE;
492 /* If the first operand is an ARRAY_REF, expand it so that we can fold
493 the offset into it. */
494 while (TREE_CODE (op0) == ARRAY_REF)
496 tree array_obj = TREE_OPERAND (op0, 0);
497 tree array_idx = TREE_OPERAND (op0, 1);
498 tree elt_type = TREE_TYPE (op0);
499 tree elt_size = TYPE_SIZE_UNIT (elt_type);
500 tree min_idx;
502 if (TREE_CODE (array_idx) != INTEGER_CST)
503 break;
504 if (TREE_CODE (elt_size) != INTEGER_CST)
505 break;
507 /* Un-bias the index by the min index of the array type. */
508 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
509 if (min_idx)
511 min_idx = TYPE_MIN_VALUE (min_idx);
512 if (min_idx)
514 if (TREE_CODE (min_idx) != INTEGER_CST)
515 break;
517 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
518 if (!integer_zerop (min_idx))
519 array_idx = int_const_binop (MINUS_EXPR, array_idx,
520 min_idx);
524 /* Convert the index to a byte offset. */
525 array_idx = fold_convert (sizetype, array_idx);
526 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size);
528 /* Update the operands for the next round, or for folding. */
529 op1 = int_const_binop (PLUS_EXPR,
530 array_idx, op1);
531 op0 = array_obj;
534 ptd_type = TREE_TYPE (res_type);
535 /* If we want a pointer to void, reconstruct the reference from the
536 array element type. A pointer to that can be trivially converted
537 to void *. This happens as we fold (void *)(ptr p+ off). */
538 if (VOID_TYPE_P (ptd_type)
539 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
540 ptd_type = TREE_TYPE (TREE_TYPE (op0));
542 /* At which point we can try some of the same things as for indirects. */
543 t = maybe_fold_offset_to_array_ref (loc, op0, op1);
544 if (t)
546 t = build_fold_addr_expr (t);
547 if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
548 return NULL_TREE;
549 SET_EXPR_LOCATION (t, loc);
552 return t;
555 /* Subroutine of fold_stmt. We perform several simplifications of the
556 memory reference tree EXPR and make sure to re-gimplify them properly
557 after propagation of constant addresses. IS_LHS is true if the
558 reference is supposed to be an lvalue. */
560 static tree
561 maybe_fold_reference (tree expr, bool is_lhs)
563 tree *t = &expr;
564 tree result;
566 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
567 || TREE_CODE (expr) == REALPART_EXPR
568 || TREE_CODE (expr) == IMAGPART_EXPR)
569 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
570 return fold_unary_loc (EXPR_LOCATION (expr),
571 TREE_CODE (expr),
572 TREE_TYPE (expr),
573 TREE_OPERAND (expr, 0));
574 else if (TREE_CODE (expr) == BIT_FIELD_REF
575 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
576 return fold_ternary_loc (EXPR_LOCATION (expr),
577 TREE_CODE (expr),
578 TREE_TYPE (expr),
579 TREE_OPERAND (expr, 0),
580 TREE_OPERAND (expr, 1),
581 TREE_OPERAND (expr, 2));
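  /* Otherwise strip down to the innermost base of the reference chain; the
     MEM_REF and TARGET_MEM_REF foldings below operate on that base.  */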
583 while (handled_component_p (*t))
584 t = &TREE_OPERAND (*t, 0);
586 /* Canonicalize a MEM_REF's invariant address operand. Do this first
587 to avoid feeding non-canonical MEM_REFs elsewhere. */
588 if (TREE_CODE (*t) == MEM_REF
589 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
591 bool volatile_p = TREE_THIS_VOLATILE (*t);
592 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
593 TREE_OPERAND (*t, 0),
594 TREE_OPERAND (*t, 1));
595 if (tem)
597 TREE_THIS_VOLATILE (tem) = volatile_p;
598 *t = tem;
599 tem = maybe_fold_reference (expr, is_lhs);
600 if (tem)
601 return tem;
602 return expr;
606 if (!is_lhs
607 && (result = fold_const_aggregate_ref (expr))
608 && is_gimple_min_invariant (result))
609 return result;
611 /* Fold back MEM_REFs to reference trees. */
612 if (TREE_CODE (*t) == MEM_REF
613 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
614 && integer_zerop (TREE_OPERAND (*t, 1))
615 && (TREE_THIS_VOLATILE (*t)
616 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
617 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
618 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
619 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
620 /* We have to look out here to not drop a required conversion
621 from the rhs to the lhs if is_lhs, but we don't have the
622 rhs here to verify that. Thus require strict type
623 compatibility. */
624 && types_compatible_p (TREE_TYPE (*t),
625 TREE_TYPE (TREE_OPERAND
626 (TREE_OPERAND (*t, 0), 0))))
628 tree tem;
629 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
630 tem = maybe_fold_reference (expr, is_lhs);
631 if (tem)
632 return tem;
633 return expr;
635 else if (TREE_CODE (*t) == TARGET_MEM_REF)
637 tree tem = maybe_fold_tmr (*t);
638 if (tem)
640 *t = tem;
641 tem = maybe_fold_reference (expr, is_lhs);
642 if (tem)
643 return tem;
644 return expr;
648 return NULL_TREE;
652 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
653 replacement rhs for the statement or NULL_TREE if no simplification
654 could be made. It is assumed that the operands have been previously
655 folded. */
657 static tree
658 fold_gimple_assign (gimple_stmt_iterator *si)
660 gimple stmt = gsi_stmt (*si);
661 enum tree_code subcode = gimple_assign_rhs_code (stmt);
662 location_t loc = gimple_location (stmt);
664 tree result = NULL_TREE;
666 switch (get_gimple_rhs_class (subcode))
668 case GIMPLE_SINGLE_RHS:
670 tree rhs = gimple_assign_rhs1 (stmt);
672 /* Try to fold a conditional expression. */
673 if (TREE_CODE (rhs) == COND_EXPR)
675 tree op0 = COND_EXPR_COND (rhs);
676 tree tem;
677 bool set = false;
678 location_t cond_loc = EXPR_LOCATION (rhs);
680 if (COMPARISON_CLASS_P (op0))
682 fold_defer_overflow_warnings ();
683 tem = fold_binary_loc (cond_loc,
684 TREE_CODE (op0), TREE_TYPE (op0),
685 TREE_OPERAND (op0, 0),
686 TREE_OPERAND (op0, 1));
687 /* This is actually a conditional expression, not a GIMPLE
688 conditional statement, however, the valid_gimple_rhs_p
689 test still applies. */
690 set = (tem && is_gimple_condexpr (tem)
691 && valid_gimple_rhs_p (tem));
692 fold_undefer_overflow_warnings (set, stmt, 0);
694 else if (is_gimple_min_invariant (op0))
696 tem = op0;
697 set = true;
699 else
700 return NULL_TREE;
702 if (set)
703 result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
704 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
707 else if (REFERENCE_CLASS_P (rhs))
708 return maybe_fold_reference (rhs, false);
710 else if (TREE_CODE (rhs) == ADDR_EXPR)
712 tree ref = TREE_OPERAND (rhs, 0);
713 tree tem = maybe_fold_reference (ref, true);
714 if (tem
715 && TREE_CODE (tem) == MEM_REF
716 && integer_zerop (TREE_OPERAND (tem, 1)))
717 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
718 else if (tem)
719 result = fold_convert (TREE_TYPE (rhs),
720 build_fold_addr_expr_loc (loc, tem));
721 else if (TREE_CODE (ref) == MEM_REF
722 && integer_zerop (TREE_OPERAND (ref, 1)))
723 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
726 else if (TREE_CODE (rhs) == CONSTRUCTOR
727 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
728 && (CONSTRUCTOR_NELTS (rhs)
729 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
731 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
732 unsigned i;
733 tree val;
735 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
736 if (TREE_CODE (val) != INTEGER_CST
737 && TREE_CODE (val) != REAL_CST
738 && TREE_CODE (val) != FIXED_CST)
739 return NULL_TREE;
741 return build_vector_from_ctor (TREE_TYPE (rhs),
742 CONSTRUCTOR_ELTS (rhs));
745 else if (DECL_P (rhs))
746 return unshare_expr (get_symbol_constant_value (rhs));
748 /* If we couldn't fold the RHS, hand over to the generic
749 fold routines. */
750 if (result == NULL_TREE)
751 result = fold (rhs);
753 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
754 that may have been added by fold, and "useless" type
755 conversions that might now be apparent due to propagation. */
756 STRIP_USELESS_TYPE_CONVERSION (result);
758 if (result != rhs && valid_gimple_rhs_p (result))
759 return result;
761 return NULL_TREE;
763 break;
765 case GIMPLE_UNARY_RHS:
767 tree rhs = gimple_assign_rhs1 (stmt);
769 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
770 if (result)
772 /* If the operation was a conversion do _not_ mark a
773 resulting constant with TREE_OVERFLOW if the original
774 constant was not. These conversions have implementation
775 defined behavior and retaining the TREE_OVERFLOW flag
776 here would confuse later passes such as VRP. */
777 if (CONVERT_EXPR_CODE_P (subcode)
778 && TREE_CODE (result) == INTEGER_CST
779 && TREE_CODE (rhs) == INTEGER_CST)
780 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
782 STRIP_USELESS_TYPE_CONVERSION (result);
783 if (valid_gimple_rhs_p (result))
784 return result;
786 else if (CONVERT_EXPR_CODE_P (subcode)
787 && POINTER_TYPE_P (gimple_expr_type (stmt))
788 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
790 tree type = gimple_expr_type (stmt);
791 tree t = maybe_fold_offset_to_address (loc,
792 gimple_assign_rhs1 (stmt),
793 integer_zero_node, type);
794 if (t)
795 return t;
798 break;
800 case GIMPLE_BINARY_RHS:
801 /* Try to fold pointer addition. */
802 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
804 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
805 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
807 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
808 if (!useless_type_conversion_p
809 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
810 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
812 result = maybe_fold_stmt_addition (gimple_location (stmt),
813 type,
814 gimple_assign_rhs1 (stmt),
815 gimple_assign_rhs2 (stmt));
818 if (!result)
819 result = fold_binary_loc (loc, subcode,
820 TREE_TYPE (gimple_assign_lhs (stmt)),
821 gimple_assign_rhs1 (stmt),
822 gimple_assign_rhs2 (stmt));
824 if (result)
826 STRIP_USELESS_TYPE_CONVERSION (result);
827 if (valid_gimple_rhs_p (result))
828 return result;
830 /* Fold might have produced non-GIMPLE, so if we trust it blindly
831 we lose canonicalization opportunities. Do not go again
832 through fold here though, or the same non-GIMPLE will be
833 produced. */
834 if (commutative_tree_code (subcode)
835 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
836 gimple_assign_rhs2 (stmt), false))
837 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
838 gimple_assign_rhs2 (stmt),
839 gimple_assign_rhs1 (stmt));
841 break;
843 case GIMPLE_TERNARY_RHS:
844 result = fold_ternary_loc (loc, subcode,
845 TREE_TYPE (gimple_assign_lhs (stmt)),
846 gimple_assign_rhs1 (stmt),
847 gimple_assign_rhs2 (stmt),
848 gimple_assign_rhs3 (stmt));
850 if (result)
852 STRIP_USELESS_TYPE_CONVERSION (result);
853 if (valid_gimple_rhs_p (result))
854 return result;
856 /* Fold might have produced non-GIMPLE, so if we trust it blindly
857 we lose canonicalization opportunities. Do not go again
858 through fold here though, or the same non-GIMPLE will be
859 produced. */
860 if (commutative_ternary_tree_code (subcode)
861 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
862 gimple_assign_rhs2 (stmt), false))
863 return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
864 gimple_assign_rhs2 (stmt),
865 gimple_assign_rhs1 (stmt),
866 gimple_assign_rhs3 (stmt));
868 break;
870 case GIMPLE_INVALID_RHS:
871 gcc_unreachable ();
874 return NULL_TREE;
877 /* Attempt to fold a conditional statement. Return true if any changes were
878 made. We only attempt to fold the condition expression, and do not perform
879 any transformation that would require alteration of the cfg. It is
880 assumed that the operands have been previously folded. */
882 static bool
883 fold_gimple_cond (gimple stmt)
885 tree result = fold_binary_loc (gimple_location (stmt),
886 gimple_cond_code (stmt),
887 boolean_type_node,
888 gimple_cond_lhs (stmt),
889 gimple_cond_rhs (stmt));
891 if (result)
893 STRIP_USELESS_TYPE_CONVERSION (result);
894 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
896 gimple_cond_set_condition_from_tree (stmt, result);
897 return true;
901 return false;
904 /* Convert EXPR into a GIMPLE value suitable for substitution on the
905 RHS of an assignment. Insert the necessary statements before
906 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL,
907 is replaced. If the call is expected to produce a result, then it
908 is replaced by an assignment of the new RHS to the result variable.
909 If the result is to be ignored, then the call is replaced by a
910 GIMPLE_NOP. A proper VDEF chain is retained by making the first
911 VUSE and the last VDEF of the whole sequence be the same as the replaced
912 statement and using new SSA names for stores in between. */
914 void
915 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
917 tree lhs;
918 tree tmp = NULL_TREE; /* Silence warning. */
919 gimple stmt, new_stmt;
920 gimple_stmt_iterator i;
921 gimple_seq stmts = gimple_seq_alloc();
922 struct gimplify_ctx gctx;
923 gimple last = NULL;
924 gimple laststore = NULL;
925 tree reaching_vuse;
927 stmt = gsi_stmt (*si_p);
929 gcc_assert (is_gimple_call (stmt));
931 lhs = gimple_call_lhs (stmt);
932 reaching_vuse = gimple_vuse (stmt);
934 push_gimplify_context (&gctx);
936 if (lhs == NULL_TREE)
938 gimplify_and_add (expr, &stmts);
939 /* We can end up folding a memcpy of an empty class assignment,
940 which gets optimized away by C++ gimplification. */
941 if (gimple_seq_empty_p (stmts))
943 pop_gimplify_context (NULL);
944 if (gimple_in_ssa_p (cfun))
946 unlink_stmt_vdef (stmt);
947 release_defs (stmt);
949 gsi_remove (si_p, true);
950 return;
953 else
954 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
956 pop_gimplify_context (NULL);
958 if (gimple_has_location (stmt))
959 annotate_all_with_location (stmts, gimple_location (stmt));
961 /* The replacement can expose previously unreferenced variables. */
962 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
964 if (last)
966 gsi_insert_before (si_p, last, GSI_NEW_STMT);
967 gsi_next (si_p);
969 new_stmt = gsi_stmt (i);
970 if (gimple_in_ssa_p (cfun))
972 find_new_referenced_vars (new_stmt);
973 mark_symbols_for_renaming (new_stmt);
975 /* If the new statement has a VUSE, update it with the exact SSA name we
976 know will reach this one. */
977 if (gimple_vuse (new_stmt))
979 /* If we've also seen a previous store create a new VDEF for
980 the latter one, and make that the new reaching VUSE. */
981 if (laststore)
983 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
984 gimple_set_vdef (laststore, reaching_vuse);
985 update_stmt (laststore);
986 laststore = NULL;
988 gimple_set_vuse (new_stmt, reaching_vuse);
989 gimple_set_modified (new_stmt, true);
991 if (gimple_assign_single_p (new_stmt)
992 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
994 laststore = new_stmt;
996 last = new_stmt;
999 if (lhs == NULL_TREE)
1001 /* If we replace a call without LHS that has a VDEF and our new
1002 sequence ends with a store we must make that store have the same
1003 vdef in order not to break the sequencing. This can happen
1004 for instance when folding memcpy calls into assignments. */
1005 if (gimple_vdef (stmt) && laststore)
1007 gimple_set_vdef (laststore, gimple_vdef (stmt));
1008 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1009 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
1010 update_stmt (laststore);
1012 else if (gimple_in_ssa_p (cfun))
1014 unlink_stmt_vdef (stmt);
1015 release_defs (stmt);
1017 new_stmt = last;
1019 else
1021 if (last)
1023 gsi_insert_before (si_p, last, GSI_NEW_STMT);
1024 gsi_next (si_p);
1026 if (laststore && is_gimple_reg (lhs))
1028 gimple_set_vdef (laststore, gimple_vdef (stmt));
1029 update_stmt (laststore);
1030 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1031 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
1032 laststore = NULL;
1034 else if (laststore)
1036 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
1037 gimple_set_vdef (laststore, reaching_vuse);
1038 update_stmt (laststore);
1039 laststore = NULL;
1041 new_stmt = gimple_build_assign (lhs, tmp);
1042 if (!is_gimple_reg (tmp))
1043 gimple_set_vuse (new_stmt, reaching_vuse);
1044 if (!is_gimple_reg (lhs))
1046 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1047 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1048 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
1050 else if (reaching_vuse == gimple_vuse (stmt))
1051 unlink_stmt_vdef (stmt);
1054 gimple_set_location (new_stmt, gimple_location (stmt));
1055 gsi_replace (si_p, new_stmt, false);
1058 /* Return the string length, maximum string length or maximum value of
1059 ARG in LENGTH.
1060 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1061 is not NULL and, for TYPE == 0, its value is not equal to the length
1062 we determine or if we are unable to determine the length or value,
1063 return false. VISITED is a bitmap of visited variables.
1064 TYPE is 0 if string length should be returned, 1 for maximum string
1065 length and 2 for maximum value ARG can have. */
1067 static bool
1068 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
1070 tree var, val;
1071 gimple def_stmt;
1073 if (TREE_CODE (arg) != SSA_NAME)
1075 if (TREE_CODE (arg) == COND_EXPR)
1076 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
1077 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
1078 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1079 else if (TREE_CODE (arg) == ADDR_EXPR
1080 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1081 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1083 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1084 if (TREE_CODE (aop0) == INDIRECT_REF
1085 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1086 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1087 length, visited, type);
1090 if (type == 2)
1092 val = arg;
1093 if (TREE_CODE (val) != INTEGER_CST
1094 || tree_int_cst_sgn (val) < 0)
1095 return false;
1097 else
1098 val = c_strlen (arg, 1);
1099 if (!val)
1100 return false;
1102 if (*length)
1104 if (type > 0)
1106 if (TREE_CODE (*length) != INTEGER_CST
1107 || TREE_CODE (val) != INTEGER_CST)
1108 return false;
1110 if (tree_int_cst_lt (*length, val))
1111 *length = val;
1112 return true;
1114 else if (simple_cst_equal (val, *length) != 1)
1115 return false;
1118 *length = val;
1119 return true;
1122 /* If we were already here, break the infinite cycle. */
1123 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
1124 return true;
1126 var = arg;
1127 def_stmt = SSA_NAME_DEF_STMT (var);
1129 switch (gimple_code (def_stmt))
1131 case GIMPLE_ASSIGN:
1132 /* The RHS of the statement defining VAR must either have a
1133 constant length or come from another SSA_NAME with a constant
1134 length. */
1135 if (gimple_assign_single_p (def_stmt)
1136 || gimple_assign_unary_nop_p (def_stmt))
1138 tree rhs = gimple_assign_rhs1 (def_stmt);
1139 return get_maxval_strlen (rhs, length, visited, type);
1141 return false;
1143 case GIMPLE_PHI:
1145 /* All the arguments of the PHI node must have the same constant
1146 length. */
1147 unsigned i;
1149 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1151 tree arg = gimple_phi_arg (def_stmt, i)->def;
1153 /* If this PHI has itself as an argument, we cannot
1154 determine the string length of this argument. However,
1155 if we can find a constant string length for the other
1156 PHI args then we can still be sure that this is a
1157 constant string length. So be optimistic and just
1158 continue with the next argument. */
1159 if (arg == gimple_phi_result (def_stmt))
1160 continue;
1162 if (!get_maxval_strlen (arg, length, visited, type))
1163 return false;
1166 return true;
1168 default:
1169 return false;
1174 /* Fold builtin call in statement STMT. Returns a simplified tree.
1175 We may return a non-constant expression, including another call
1176 to a different function and with different arguments, e.g.,
1177 substituting memcpy for strcpy when the string length is known.
1178 Note that some builtins expand into inline code that may not
1179 be valid in GIMPLE. Callers must take care. */
1181 tree
1182 gimple_fold_builtin (gimple stmt)
1184 tree result, val[3];
1185 tree callee, a;
1186 int arg_idx, type;
1187 bitmap visited;
1188 bool ignore;
1189 int nargs;
1190 location_t loc = gimple_location (stmt);
1192 gcc_assert (is_gimple_call (stmt));
1194 ignore = (gimple_call_lhs (stmt) == NULL);
1196 /* First try the generic builtin folder. If that succeeds, return the
1197 result directly. */
1198 result = fold_call_stmt (stmt, ignore);
1199 if (result)
1201 if (ignore)
1202 STRIP_NOPS (result);
1203 return result;
1206 /* Ignore MD builtins. */
1207 callee = gimple_call_fndecl (stmt);
1208 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
1209 return NULL_TREE;
1211 /* If the builtin could not be folded, and it has no argument list,
1212 we're done. */
1213 nargs = gimple_call_num_args (stmt);
1214 if (nargs == 0)
1215 return NULL_TREE;
1217 /* Limit the work to builtins we know how to simplify. */
1218 switch (DECL_FUNCTION_CODE (callee))
1220 case BUILT_IN_STRLEN:
1221 case BUILT_IN_FPUTS:
1222 case BUILT_IN_FPUTS_UNLOCKED:
1223 arg_idx = 0;
1224 type = 0;
1225 break;
1226 case BUILT_IN_STRCPY:
1227 case BUILT_IN_STRNCPY:
1228 arg_idx = 1;
1229 type = 0;
1230 break;
1231 case BUILT_IN_MEMCPY_CHK:
1232 case BUILT_IN_MEMPCPY_CHK:
1233 case BUILT_IN_MEMMOVE_CHK:
1234 case BUILT_IN_MEMSET_CHK:
1235 case BUILT_IN_STRNCPY_CHK:
1236 arg_idx = 2;
1237 type = 2;
1238 break;
1239 case BUILT_IN_STRCPY_CHK:
1240 case BUILT_IN_STPCPY_CHK:
1241 arg_idx = 1;
1242 type = 1;
1243 break;
1244 case BUILT_IN_SNPRINTF_CHK:
1245 case BUILT_IN_VSNPRINTF_CHK:
1246 arg_idx = 1;
1247 type = 2;
1248 break;
1249 default:
1250 return NULL_TREE;
1253 if (arg_idx >= nargs)
1254 return NULL_TREE;
1256 /* Try to use the dataflow information gathered by the CCP process. */
1257 visited = BITMAP_ALLOC (NULL);
1258 bitmap_clear (visited);
1260 memset (val, 0, sizeof (val));
1261 a = gimple_call_arg (stmt, arg_idx);
1262 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
1263 val[arg_idx] = NULL_TREE;
1265 BITMAP_FREE (visited);
1267 result = NULL_TREE;
1268 switch (DECL_FUNCTION_CODE (callee))
1270 case BUILT_IN_STRLEN:
1271 if (val[0] && nargs == 1)
1273 tree new_val =
1274 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
1276 /* If the result is not a valid gimple value, or not a cast
1277 of a valid gimple value, then we cannot use the result. */
1278 if (is_gimple_val (new_val)
1279 || (CONVERT_EXPR_P (new_val)
1280 && is_gimple_val (TREE_OPERAND (new_val, 0))))
1281 return new_val;
1283 break;
1285 case BUILT_IN_STRCPY:
1286 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
1287 result = fold_builtin_strcpy (loc, callee,
1288 gimple_call_arg (stmt, 0),
1289 gimple_call_arg (stmt, 1),
1290 val[1]);
1291 break;
1293 case BUILT_IN_STRNCPY:
1294 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1295 result = fold_builtin_strncpy (loc, callee,
1296 gimple_call_arg (stmt, 0),
1297 gimple_call_arg (stmt, 1),
1298 gimple_call_arg (stmt, 2),
1299 val[1]);
1300 break;
1302 case BUILT_IN_FPUTS:
1303 if (nargs == 2)
1304 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1305 gimple_call_arg (stmt, 1),
1306 ignore, false, val[0]);
1307 break;
1309 case BUILT_IN_FPUTS_UNLOCKED:
1310 if (nargs == 2)
1311 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1312 gimple_call_arg (stmt, 1),
1313 ignore, true, val[0]);
1314 break;
1316 case BUILT_IN_MEMCPY_CHK:
1317 case BUILT_IN_MEMPCPY_CHK:
1318 case BUILT_IN_MEMMOVE_CHK:
1319 case BUILT_IN_MEMSET_CHK:
1320 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1321 result = fold_builtin_memory_chk (loc, callee,
1322 gimple_call_arg (stmt, 0),
1323 gimple_call_arg (stmt, 1),
1324 gimple_call_arg (stmt, 2),
1325 gimple_call_arg (stmt, 3),
1326 val[2], ignore,
1327 DECL_FUNCTION_CODE (callee));
1328 break;
1330 case BUILT_IN_STRCPY_CHK:
1331 case BUILT_IN_STPCPY_CHK:
1332 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1333 result = fold_builtin_stxcpy_chk (loc, callee,
1334 gimple_call_arg (stmt, 0),
1335 gimple_call_arg (stmt, 1),
1336 gimple_call_arg (stmt, 2),
1337 val[1], ignore,
1338 DECL_FUNCTION_CODE (callee));
1339 break;
1341 case BUILT_IN_STRNCPY_CHK:
1342 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1343 result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
1344 gimple_call_arg (stmt, 1),
1345 gimple_call_arg (stmt, 2),
1346 gimple_call_arg (stmt, 3),
1347 val[2]);
1348 break;
1350 case BUILT_IN_SNPRINTF_CHK:
1351 case BUILT_IN_VSNPRINTF_CHK:
1352 if (val[1] && is_gimple_val (val[1]))
1353 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1354 DECL_FUNCTION_CODE (callee));
1355 break;
1357 default:
1358 gcc_unreachable ();
1361 if (result && ignore)
1362 result = fold_ignored_result (result);
1363 return result;
1366 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
1367 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
1368 KNOWN_BINFO carries the binfo describing the true type of
1369 OBJ_TYPE_REF_OBJECT(REF). If a call to the function must be accompanied
1370 by a this adjustment, the constant which should be added to the this pointer
1371 is stored in *DELTA. If REFUSE_THUNKS is true, return NULL if the function
1372 is a thunk (other than a this adjustment which is dealt with by DELTA). */
1374 tree
1375 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
1376 tree *delta, bool refuse_thunks)
1378 HOST_WIDE_INT i;
1379 tree v, fndecl;
1380 struct cgraph_node *node;
1382 v = BINFO_VIRTUALS (known_binfo);
1383 /* If there are no virtual methods, leave the OBJ_TYPE_REF alone. */
1384 if (!v)
1385 return NULL_TREE;
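  /* Walk down to the TOKEN-th entry of BINFO_VIRTUALS; when the target uses
     vtable descriptors, each virtual function occupies
     TARGET_VTABLE_USES_DESCRIPTORS slots.  */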
1386 i = 0;
1387 while (i != token)
1389 i += (TARGET_VTABLE_USES_DESCRIPTORS
1390 ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
1391 v = TREE_CHAIN (v);
1394 /* If BV_VCALL_INDEX is non-NULL, give up. */
1395 if (TREE_TYPE (v))
1396 return NULL_TREE;
1398 fndecl = TREE_VALUE (v);
1399 node = cgraph_get_node_or_alias (fndecl);
1400 if (refuse_thunks
1401 && (!node
1402 /* Bail out if it is a thunk declaration. Since simple this_adjusting
1403 thunks are represented by a constant in TREE_PURPOSE of items in
1404 BINFO_VIRTUALS, this is a more complicated type which we cannot handle
1405 yet.
1407 FIXME: Remove the following condition once we are able to represent
1408 thunk information on call graph edges. */
1409 || (node->same_body_alias && node->thunk.thunk_p)))
1410 return NULL_TREE;
1412 /* When the cgraph node is missing and the function is not public, we cannot
1413 devirtualize. This can happen in WHOPR when the actual method
1414 ends up in another partition, because we found the devirtualization
1415 possibility too late. */
1416 if (!can_refer_decl_in_current_unit_p (TREE_VALUE (v)))
1417 return NULL_TREE;
1419 *delta = TREE_PURPOSE (v);
1420 gcc_checking_assert (host_integerp (*delta, 0));
1421 return fndecl;
1424 /* Generate code adjusting the first parameter of a call statement determined
1425 by GSI by DELTA. */
1427 void
1428 gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
1430 gimple call_stmt = gsi_stmt (*gsi);
1431 tree parm, tmp;
1432 gimple new_stmt;
1434 delta = fold_convert (sizetype, delta);
1435 gcc_assert (gimple_call_num_args (call_stmt) >= 1);
1436 parm = gimple_call_arg (call_stmt, 0);
1437 gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
1438 tmp = create_tmp_var (TREE_TYPE (parm), NULL);
1439 add_referenced_var (tmp);
1441 tmp = make_ssa_name (tmp, NULL);
1442 new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
1443 SSA_NAME_DEF_STMT (tmp) = new_stmt;
1444 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1445 gimple_call_set_arg (call_stmt, 0, tmp);
1448 /* Return a binfo to be used for devirtualization of calls based on an object
1449 represented by a declaration (i.e. a global or automatically allocated one)
1450 or NULL if it cannot be found or is not safe. CST is expected to be an
1451 ADDR_EXPR of such object or the function will return NULL. Currently it is
1452 safe to use such binfo only if it has no base binfo (i.e. no ancestors). */
1454 tree
1455 gimple_extract_devirt_binfo_from_cst (tree cst)
1457 HOST_WIDE_INT offset, size, max_size;
1458 tree base, type, expected_type, binfo;
1459 bool last_artificial = false;
1461 if (!flag_devirtualize
1462 || TREE_CODE (cst) != ADDR_EXPR
1463 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
1464 return NULL_TREE;
1466 cst = TREE_OPERAND (cst, 0);
1467 expected_type = TREE_TYPE (cst);
1468 base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
1469 type = TREE_TYPE (base);
1470 if (!DECL_P (base)
1471 || max_size == -1
1472 || max_size != size
1473 || TREE_CODE (type) != RECORD_TYPE)
1474 return NULL_TREE;
1476 /* Find the sub-object the constant actually refers to and mark whether it is
1477 an artificial one (as opposed to a user-defined one). */
1478 while (true)
1480 HOST_WIDE_INT pos, size;
1481 tree fld;
1483 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
1484 break;
1485 if (offset < 0)
1486 return NULL_TREE;
1488 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1490 if (TREE_CODE (fld) != FIELD_DECL)
1491 continue;
1493 pos = int_bit_position (fld);
1494 size = tree_low_cst (DECL_SIZE (fld), 1);
1495 if (pos <= offset && (pos + size) > offset)
1496 break;
1498 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
1499 return NULL_TREE;
1501 last_artificial = DECL_ARTIFICIAL (fld);
1502 type = TREE_TYPE (fld);
1503 offset -= pos;
1505 /* Artificial sub-objects are ancestors; we do not want to use them for
1506 devirtualization, at least not here. */
1507 if (last_artificial)
1508 return NULL_TREE;
1509 binfo = TYPE_BINFO (type);
1510 if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
1511 return NULL_TREE;
1512 else
1513 return binfo;
1516 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1517 The statement may be replaced by another statement, e.g., if the call
1518 simplifies to a constant value. Return true if any changes were made.
1519 It is assumed that the operands have been previously folded. */
1521 bool
1522 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1524 gimple stmt = gsi_stmt (*gsi);
1525 tree callee;
1527 /* Check for builtins that CCP can handle using information not
1528 available in the generic fold routines. */
1529 callee = gimple_call_fndecl (stmt);
1530 if (!inplace && callee && DECL_BUILT_IN (callee))
1532 tree result = gimple_fold_builtin (stmt);
1534 if (result)
1536 if (!update_call_from_tree (gsi, result))
1537 gimplify_and_update_call_from_tree (gsi, result);
1538 return true;
1542 /* Check for virtual calls that became direct calls. */
1543 callee = gimple_call_fn (stmt);
1544 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1546 tree binfo, fndecl, delta, obj;
1547 HOST_WIDE_INT token;
1549 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1551 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1552 return true;
1555 obj = OBJ_TYPE_REF_OBJECT (callee);
1556 binfo = gimple_extract_devirt_binfo_from_cst (obj);
1557 if (!binfo)
1558 return false;
1559 token = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1560 fndecl = gimple_get_virt_method_for_binfo (token, binfo, &delta, false);
1561 if (!fndecl)
1562 return false;
1563 gcc_assert (integer_zerop (delta));
1564 gimple_call_set_fndecl (stmt, fndecl);
1565 return true;
1568 return false;
1571 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1572 distinguishes both cases. */
1574 static bool
1575 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1577 bool changed = false;
1578 gimple stmt = gsi_stmt (*gsi);
1579 unsigned i;
1581 /* Fold the main computation performed by the statement. */
1582 switch (gimple_code (stmt))
1584 case GIMPLE_ASSIGN:
1586 unsigned old_num_ops = gimple_num_ops (stmt);
1587 tree new_rhs = fold_gimple_assign (gsi);
1588 tree lhs = gimple_assign_lhs (stmt);
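	/* If the folded RHS has a different but compatible type, convert it.
	   When folding in place we may only install a new RHS that needs
	   fewer operands than the original statement provides.  */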
1589 if (new_rhs
1590 && !useless_type_conversion_p (TREE_TYPE (lhs),
1591 TREE_TYPE (new_rhs)))
1592 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1593 if (new_rhs
1594 && (!inplace
1595 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1597 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1598 changed = true;
1600 break;
1603 case GIMPLE_COND:
1604 changed |= fold_gimple_cond (stmt);
1605 break;
1607 case GIMPLE_CALL:
1608 /* Fold *& in call arguments. */
1609 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1610 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1612 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1613 if (tmp)
1615 gimple_call_set_arg (stmt, i, tmp);
1616 changed = true;
1619 changed |= gimple_fold_call (gsi, inplace);
1620 break;
1622 case GIMPLE_ASM:
1623 /* Fold *& in asm operands. */
1624 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1626 tree link = gimple_asm_output_op (stmt, i);
1627 tree op = TREE_VALUE (link);
1628 if (REFERENCE_CLASS_P (op)
1629 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1631 TREE_VALUE (link) = op;
1632 changed = true;
1635 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1637 tree link = gimple_asm_input_op (stmt, i);
1638 tree op = TREE_VALUE (link);
1639 if (REFERENCE_CLASS_P (op)
1640 && (op = maybe_fold_reference (op, false)) != NULL_TREE)
1642 TREE_VALUE (link) = op;
1643 changed = true;
1646 break;
1648 case GIMPLE_DEBUG:
1649 if (gimple_debug_bind_p (stmt))
1651 tree val = gimple_debug_bind_get_value (stmt);
1652 if (val
1653 && REFERENCE_CLASS_P (val))
1655 tree tem = maybe_fold_reference (val, false);
1656 if (tem)
1658 gimple_debug_bind_set_value (stmt, tem);
1659 changed = true;
1663 break;
1665 default:;
1668 stmt = gsi_stmt (*gsi);
1670 /* Fold *& on the lhs. */
1671 if (gimple_has_lhs (stmt))
1673 tree lhs = gimple_get_lhs (stmt);
1674 if (lhs && REFERENCE_CLASS_P (lhs))
1676 tree new_lhs = maybe_fold_reference (lhs, true);
1677 if (new_lhs)
1679 gimple_set_lhs (stmt, new_lhs);
1680 changed = true;
1685 return changed;
1688 /* Fold the statement pointed to by GSI. In some cases, this function may
1689 replace the whole statement with a new one. Returns true iff folding
1690 makes any changes.
1691 The statement pointed to by GSI should be in valid gimple form but may
1692 be in unfolded state as resulting from for example constant propagation
1693 which can produce *&x = 0. */
1695 bool
1696 fold_stmt (gimple_stmt_iterator *gsi)
1698 return fold_stmt_1 (gsi, false);
1701 /* Perform the minimal folding on statement STMT. Only operations like
1702 *&x created by constant propagation are handled. The statement cannot
1703 be replaced with a new one. Return true if the statement was
1704 changed, false otherwise.
1705 The statement STMT should be in valid gimple form but may
1706 be in unfolded state as resulting from for example constant propagation
1707 which can produce *&x = 0. */
1709 bool
1710 fold_stmt_inplace (gimple stmt)
1712 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1713 bool changed = fold_stmt_1 (&gsi, true);
1714 gcc_assert (gsi_stmt (gsi) == stmt);
1715 return changed;
1718 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1719 if EXPR is null or we don't know how.
1720 If non-null, the result always has boolean type. */
1722 static tree
1723 canonicalize_bool (tree expr, bool invert)
1725 if (!expr)
1726 return NULL_TREE;
1727 else if (invert)
1729 if (integer_nonzerop (expr))
1730 return boolean_false_node;
1731 else if (integer_zerop (expr))
1732 return boolean_true_node;
1733 else if (TREE_CODE (expr) == SSA_NAME)
1734 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1735 build_int_cst (TREE_TYPE (expr), 0));
1736 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1737 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1738 boolean_type_node,
1739 TREE_OPERAND (expr, 0),
1740 TREE_OPERAND (expr, 1));
1741 else
1742 return NULL_TREE;
1744 else
1746 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1747 return expr;
1748 if (integer_nonzerop (expr))
1749 return boolean_true_node;
1750 else if (integer_zerop (expr))
1751 return boolean_false_node;
1752 else if (TREE_CODE (expr) == SSA_NAME)
1753 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1754 build_int_cst (TREE_TYPE (expr), 0));
1755 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1756 return fold_build2 (TREE_CODE (expr),
1757 boolean_type_node,
1758 TREE_OPERAND (expr, 0),
1759 TREE_OPERAND (expr, 1));
1760 else
1761 return NULL_TREE;
1765 /* Check to see if a boolean expression EXPR is logically equivalent to the
1766 comparison (OP1 CODE OP2). Check for various identities involving
1767 SSA_NAMEs. */
1769 static bool
1770 same_bool_comparison_p (const_tree expr, enum tree_code code,
1771 const_tree op1, const_tree op2)
1773 gimple s;
1775 /* The obvious case. */
1776 if (TREE_CODE (expr) == code
1777 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1778 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1779 return true;
1781 /* Check for comparing (name, name != 0) and the case where expr
1782 is an SSA_NAME with a definition matching the comparison. */
1783 if (TREE_CODE (expr) == SSA_NAME
1784 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1786 if (operand_equal_p (expr, op1, 0))
1787 return ((code == NE_EXPR && integer_zerop (op2))
1788 || (code == EQ_EXPR && integer_nonzerop (op2)));
1789 s = SSA_NAME_DEF_STMT (expr);
1790 if (is_gimple_assign (s)
1791 && gimple_assign_rhs_code (s) == code
1792 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1793 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1794 return true;
1797 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1798 of name is a comparison, recurse. */
1799 if (TREE_CODE (op1) == SSA_NAME
1800 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1802 s = SSA_NAME_DEF_STMT (op1);
1803 if (is_gimple_assign (s)
1804 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1806 enum tree_code c = gimple_assign_rhs_code (s);
1807 if ((c == NE_EXPR && integer_zerop (op2))
1808 || (c == EQ_EXPR && integer_nonzerop (op2)))
1809 return same_bool_comparison_p (expr, c,
1810 gimple_assign_rhs1 (s),
1811 gimple_assign_rhs2 (s));
1812 if ((c == EQ_EXPR && integer_zerop (op2))
1813 || (c == NE_EXPR && integer_nonzerop (op2)))
1814 return same_bool_comparison_p (expr,
1815 invert_tree_comparison (c, false),
1816 gimple_assign_rhs1 (s),
1817 gimple_assign_rhs2 (s));
1820 return false;
1823 /* Check to see if two boolean expressions OP1 and OP2 are logically
1824 equivalent. */
1826 static bool
1827 same_bool_result_p (const_tree op1, const_tree op2)
1829 /* Simple cases first. */
1830 if (operand_equal_p (op1, op2, 0))
1831 return true;
1833 /* Check the cases where at least one of the operands is a comparison.
1834 These are a bit smarter than operand_equal_p in that they apply some
1835 identities on SSA_NAMEs. */
1836 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1837 && same_bool_comparison_p (op1, TREE_CODE (op2),
1838 TREE_OPERAND (op2, 0),
1839 TREE_OPERAND (op2, 1)))
1840 return true;
1841 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1842 && same_bool_comparison_p (op2, TREE_CODE (op1),
1843 TREE_OPERAND (op1, 0),
1844 TREE_OPERAND (op1, 1)))
1845 return true;
1847 /* Default case. */
1848 return false;
1851 /* Forward declarations for some mutually recursive functions. */
1853 static tree
1854 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1855 enum tree_code code2, tree op2a, tree op2b);
1856 static tree
1857 and_var_with_comparison (tree var, bool invert,
1858 enum tree_code code2, tree op2a, tree op2b);
1859 static tree
1860 and_var_with_comparison_1 (gimple stmt,
1861 enum tree_code code2, tree op2a, tree op2b);
1862 static tree
1863 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1864 enum tree_code code2, tree op2a, tree op2b);
1865 static tree
1866 or_var_with_comparison (tree var, bool invert,
1867 enum tree_code code2, tree op2a, tree op2b);
1868 static tree
1869 or_var_with_comparison_1 (gimple stmt,
1870 enum tree_code code2, tree op2a, tree op2b);
1872 /* Helper function for and_comparisons_1: try to simplify the AND of the
1873 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1874 If INVERT is true, invert the value of the VAR before doing the AND.
1875 Return NULL_TREE if we can't simplify this to a single expression. */
1877 static tree
1878 and_var_with_comparison (tree var, bool invert,
1879 enum tree_code code2, tree op2a, tree op2b)
1881 tree t;
1882 gimple stmt = SSA_NAME_DEF_STMT (var);
1884 /* We can only deal with variables whose definitions are assignments. */
1885 if (!is_gimple_assign (stmt))
1886 return NULL_TREE;
1888 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1889 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1890 Then we only have to consider the simpler non-inverted cases. */
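/* Concretely, with integer operands (so the inverse of < is >=),
!x AND (a < b) is handled as !(x OR (a >= b)): the inner OR is
simplified first and canonicalize_bool then re-applies the outer
negation. */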
1891 if (invert)
1892 t = or_var_with_comparison_1 (stmt,
1893 invert_tree_comparison (code2, false),
1894 op2a, op2b);
1895 else
1896 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1897 return canonicalize_bool (t, invert);
1900 /* Try to simplify the AND of the ssa variable defined by the assignment
1901 STMT with the comparison specified by (OP2A CODE2 OP2B).
1902 Return NULL_TREE if we can't simplify this to a single expression. */
1904 static tree
1905 and_var_with_comparison_1 (gimple stmt,
1906 enum tree_code code2, tree op2a, tree op2b)
1908 tree var = gimple_assign_lhs (stmt);
1909 tree true_test_var = NULL_TREE;
1910 tree false_test_var = NULL_TREE;
1911 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1913 /* Check for identities like (var AND (var == 0)) => false. */
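/* For example, x AND (x != 0) reduces to x itself, while
x AND (x == 0) can never be true. */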
1914 if (TREE_CODE (op2a) == SSA_NAME
1915 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1917 if ((code2 == NE_EXPR && integer_zerop (op2b))
1918 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1920 true_test_var = op2a;
1921 if (var == true_test_var)
1922 return var;
1924 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1925 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1927 false_test_var = op2a;
1928 if (var == false_test_var)
1929 return boolean_false_node;
1933 /* If the definition is a comparison, recurse on it. */
1934 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1936 tree t = and_comparisons_1 (innercode,
1937 gimple_assign_rhs1 (stmt),
1938 gimple_assign_rhs2 (stmt),
1939 code2,
1940 op2a,
1941 op2b);
1942 if (t)
1943 return t;
1946 /* If the definition is an AND or OR expression, we may be able to
1947 simplify by reassociating. */
1948 if (innercode == TRUTH_AND_EXPR
1949 || innercode == TRUTH_OR_EXPR
1950 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1951 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
1953 tree inner1 = gimple_assign_rhs1 (stmt);
1954 tree inner2 = gimple_assign_rhs2 (stmt);
1955 gimple s;
1956 tree t;
1957 tree partial = NULL_TREE;
1958 bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);
1960 /* Check for boolean identities that don't require recursive examination
1961 of inner1/inner2:
1962 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1963 inner1 AND (inner1 OR inner2) => inner1
1964 !inner1 AND (inner1 AND inner2) => false
1965 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1966 Likewise for similar cases involving inner2. */
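/* For example, if var is defined as a & b for boolean a and b,
ANDing var with (a != 0) yields just var, while ANDing it with
(a == 0) yields false. */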
1967 if (inner1 == true_test_var)
1968 return (is_and ? var : inner1);
1969 else if (inner2 == true_test_var)
1970 return (is_and ? var : inner2);
1971 else if (inner1 == false_test_var)
1972 return (is_and
1973 ? boolean_false_node
1974 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1975 else if (inner2 == false_test_var)
1976 return (is_and
1977 ? boolean_false_node
1978 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1980 /* Next, redistribute/reassociate the AND across the inner tests.
1981 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1982 if (TREE_CODE (inner1) == SSA_NAME
1983 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1984 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1985 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1986 gimple_assign_rhs1 (s),
1987 gimple_assign_rhs2 (s),
1988 code2, op2a, op2b)))
1990 /* Handle the AND case, where we are reassociating:
1991 (inner1 AND inner2) AND (op2a code2 op2b)
1992 => (t AND inner2)
1993 If the partial result t is a constant, we win. Otherwise
1994 continue on to try reassociating with the other inner test. */
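/* For example, if inner1 is (a > 2) and the second comparison is
(a < 1), the partial result t is false and so is the whole
conjunction. */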
1995 if (is_and)
1997 if (integer_onep (t))
1998 return inner2;
1999 else if (integer_zerop (t))
2000 return boolean_false_node;
2003 /* Handle the OR case, where we are redistributing:
2004 (inner1 OR inner2) AND (op2a code2 op2b)
2005 => (t OR (inner2 AND (op2a code2 op2b))) */
2006 else if (integer_onep (t))
2007 return boolean_true_node;
2009 /* Save partial result for later. */
2010 partial = t;
2013 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
2014 if (TREE_CODE (inner2) == SSA_NAME
2015 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2016 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2017 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
2018 gimple_assign_rhs1 (s),
2019 gimple_assign_rhs2 (s),
2020 code2, op2a, op2b)))
2022 /* Handle the AND case, where we are reassociating:
2023 (inner1 AND inner2) AND (op2a code2 op2b)
2024 => (inner1 AND t) */
2025 if (is_and)
2027 if (integer_onep (t))
2028 return inner1;
2029 else if (integer_zerop (t))
2030 return boolean_false_node;
2031 /* If both are the same, we can apply the identity
2032 (x AND x) == x. */
2033 else if (partial && same_bool_result_p (t, partial))
2034 return t;
2037 /* Handle the OR case, where we are redistributing:
2038 (inner1 OR inner2) AND (op2a code2 op2b)
2039 => (t OR (inner1 AND (op2a code2 op2b)))
2040 => (t OR partial) */
2041 else
2043 if (integer_onep (t))
2044 return boolean_true_node;
2045 else if (partial)
2047 /* We already got a simplification for the other
2048 operand to the redistributed OR expression. The
2049 interesting case is when at least one is false.
2050 Or, if both are the same, we can apply the identity
2051 (x OR x) == x. */
2052 if (integer_zerop (partial))
2053 return t;
2054 else if (integer_zerop (t))
2055 return partial;
2056 else if (same_bool_result_p (t, partial))
2057 return t;
2062 return NULL_TREE;
2065 /* Try to simplify the AND of two comparisons defined by
2066 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2067 If this can be done without constructing an intermediate value,
2068 return the resulting tree; otherwise NULL_TREE is returned.
2069 This function is deliberately asymmetric as it recurses on SSA_DEFs
2070 in the first comparison but not the second. */
2072 static tree
2073 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2074 enum tree_code code2, tree op2a, tree op2b)
2076 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
2077 if (operand_equal_p (op1a, op2a, 0)
2078 && operand_equal_p (op1b, op2b, 0))
2080 tree t = combine_comparisons (UNKNOWN_LOCATION,
2081 TRUTH_ANDIF_EXPR, code1, code2,
2082 boolean_type_node, op1a, op1b);
2083 if (t)
2084 return t;
2087 /* Likewise the swapped case of the above. */
2088 if (operand_equal_p (op1a, op2b, 0)
2089 && operand_equal_p (op1b, op2a, 0))
2091 tree t = combine_comparisons (UNKNOWN_LOCATION,
2092 TRUTH_ANDIF_EXPR, code1,
2093 swap_tree_comparison (code2),
2094 boolean_type_node, op1a, op1b);
2095 if (t)
2096 return t;
2099 /* If both comparisons are of the same value against constants, we might
2100 be able to merge them. */
2101 if (operand_equal_p (op1a, op2a, 0)
2102 && TREE_CODE (op1b) == INTEGER_CST
2103 && TREE_CODE (op2b) == INTEGER_CST)
2105 int cmp = tree_int_cst_compare (op1b, op2b);
2107 /* If we have (op1a == op1b), we should either be able to
2108 return that or FALSE, depending on whether the constant op1b
2109 also satisfies the other comparison against op2b. */
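/* For example, (a == 3) AND (a < 7) simplifies to a == 3 because the
constant 3 satisfies a < 7, whereas (a == 3) AND (a < 2) folds to
false. */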
2110 if (code1 == EQ_EXPR)
2112 bool done = true;
2113 bool val;
2114 switch (code2)
2116 case EQ_EXPR: val = (cmp == 0); break;
2117 case NE_EXPR: val = (cmp != 0); break;
2118 case LT_EXPR: val = (cmp < 0); break;
2119 case GT_EXPR: val = (cmp > 0); break;
2120 case LE_EXPR: val = (cmp <= 0); break;
2121 case GE_EXPR: val = (cmp >= 0); break;
2122 default: done = false;
2124 if (done)
2126 if (val)
2127 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2128 else
2129 return boolean_false_node;
2132 /* Likewise if the second comparison is an == comparison. */
2133 else if (code2 == EQ_EXPR)
2135 bool done = true;
2136 bool val;
2137 switch (code1)
2139 case EQ_EXPR: val = (cmp == 0); break;
2140 case NE_EXPR: val = (cmp != 0); break;
2141 case LT_EXPR: val = (cmp > 0); break;
2142 case GT_EXPR: val = (cmp < 0); break;
2143 case LE_EXPR: val = (cmp >= 0); break;
2144 case GE_EXPR: val = (cmp <= 0); break;
2145 default: done = false;
2147 if (done)
2149 if (val)
2150 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2151 else
2152 return boolean_false_node;
2156 /* Same business with inequality tests. */
2157 else if (code1 == NE_EXPR)
2159 bool val;
2160 switch (code2)
2162 case EQ_EXPR: val = (cmp != 0); break;
2163 case NE_EXPR: val = (cmp == 0); break;
2164 case LT_EXPR: val = (cmp >= 0); break;
2165 case GT_EXPR: val = (cmp <= 0); break;
2166 case LE_EXPR: val = (cmp > 0); break;
2167 case GE_EXPR: val = (cmp < 0); break;
2168 default:
2169 val = false;
2171 if (val)
2172 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2174 else if (code2 == NE_EXPR)
2176 bool val;
2177 switch (code1)
2179 case EQ_EXPR: val = (cmp == 0); break;
2180 case NE_EXPR: val = (cmp != 0); break;
2181 case LT_EXPR: val = (cmp <= 0); break;
2182 case GT_EXPR: val = (cmp >= 0); break;
2183 case LE_EXPR: val = (cmp < 0); break;
2184 case GE_EXPR: val = (cmp > 0); break;
2185 default:
2186 val = false;
2188 if (val)
2189 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2192 /* Choose the more restrictive of two < or <= comparisons. */
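/* For example, (a < 5) AND (a <= 5) keeps the stricter a < 5, and
(a < 5) AND (a <= 7) likewise keeps a < 5. */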
2193 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2194 && (code2 == LT_EXPR || code2 == LE_EXPR))
2196 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2197 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2198 else
2199 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2202 /* Likewise choose the more restrictive of two > or >= comparisons. */
2203 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2204 && (code2 == GT_EXPR || code2 == GE_EXPR))
2206 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2207 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2208 else
2209 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2212 /* Check for singleton ranges. */
2213 else if (cmp == 0
2214 && ((code1 == LE_EXPR && code2 == GE_EXPR)
2215 || (code1 == GE_EXPR && code2 == LE_EXPR)))
2216 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
2218 /* Check for disjoint ranges. */
2219 else if (cmp <= 0
2220 && (code1 == LT_EXPR || code1 == LE_EXPR)
2221 && (code2 == GT_EXPR || code2 == GE_EXPR))
2222 return boolean_false_node;
2223 else if (cmp >= 0
2224 && (code1 == GT_EXPR || code1 == GE_EXPR)
2225 && (code2 == LT_EXPR || code2 == LE_EXPR))
2226 return boolean_false_node;
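/* For example, (a >= 3) AND (a <= 3) collapses to a == 3, while
(a < 3) AND (a > 5) is trivially false. */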
2229 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2230 NAME's definition is a truth value. See if there are any simplifications
2231 that can be done against the NAME's definition. */
2232 if (TREE_CODE (op1a) == SSA_NAME
2233 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2234 && (integer_zerop (op1b) || integer_onep (op1b)))
2236 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2237 || (code1 == NE_EXPR && integer_onep (op1b)));
2238 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2239 switch (gimple_code (stmt))
2241 case GIMPLE_ASSIGN:
2242 /* Try to simplify by copy-propagating the definition. */
2243 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
2245 case GIMPLE_PHI:
2246 /* If every argument to the PHI produces the same result when
2247 ANDed with the second comparison, we win.
2248 Do not do this unless the type is bool since we need a bool
2249 result here anyway. */
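/* For example, given x = PHI <0, y>, the constant 0 argument forces
a false result, so the AND simplifies only if the y argument also
yields false. */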
2250 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2252 tree result = NULL_TREE;
2253 unsigned i;
2254 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2256 tree arg = gimple_phi_arg_def (stmt, i);
2258 /* If this PHI has itself as an argument, ignore it.
2259 If all the other args produce the same result,
2260 we're still OK. */
2261 if (arg == gimple_phi_result (stmt))
2262 continue;
2263 else if (TREE_CODE (arg) == INTEGER_CST)
2265 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
2267 if (!result)
2268 result = boolean_false_node;
2269 else if (!integer_zerop (result))
2270 return NULL_TREE;
2272 else if (!result)
2273 result = fold_build2 (code2, boolean_type_node,
2274 op2a, op2b);
2275 else if (!same_bool_comparison_p (result,
2276 code2, op2a, op2b))
2277 return NULL_TREE;
2279 else if (TREE_CODE (arg) == SSA_NAME
2280 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2282 tree temp;
2283 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2284 /* In simple cases we can look through PHI nodes,
2285 but we have to be careful with loops.
2286 See PR49073. */
2287 if (! dom_info_available_p (CDI_DOMINATORS)
2288 || gimple_bb (def_stmt) == gimple_bb (stmt)
2289 || dominated_by_p (CDI_DOMINATORS,
2290 gimple_bb (def_stmt),
2291 gimple_bb (stmt)))
2292 return NULL_TREE;
2293 temp = and_var_with_comparison (arg, invert, code2,
2294 op2a, op2b);
2295 if (!temp)
2296 return NULL_TREE;
2297 else if (!result)
2298 result = temp;
2299 else if (!same_bool_result_p (result, temp))
2300 return NULL_TREE;
2302 else
2303 return NULL_TREE;
2305 return result;
2308 default:
2309 break;
2312 return NULL_TREE;
2315 /* Try to simplify the AND of two comparisons, specified by
2316 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2317 If this can be simplified to a single expression (without requiring
2318 introducing more SSA variables to hold intermediate values),
2319 return the resulting tree. Otherwise return NULL_TREE.
2320 If the result expression is non-null, it has boolean type. */
2322 tree
2323 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2324 enum tree_code code2, tree op2a, tree op2b)
2326 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2327 if (t)
2328 return t;
2329 else
2330 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
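/* As an illustration (a hypothetical caller; x and the integer
constants are made up for the example):

maybe_fold_and_comparisons (LT_EXPR, x, build_int_cst (TREE_TYPE (x), 5),
LT_EXPR, x, build_int_cst (TREE_TYPE (x), 10))

yields the tree for x < 5, disjoint ranges such as (x < 3) AND (x > 5)
yield boolean_false_node, and unrelated operands yield NULL_TREE. */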
2333 /* Helper function for or_comparisons_1: try to simplify the OR of the
2334 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2335 If INVERT is true, invert the value of VAR before doing the OR.
2336 Return NULL_TREE if we can't simplify this to a single expression. */
2338 static tree
2339 or_var_with_comparison (tree var, bool invert,
2340 enum tree_code code2, tree op2a, tree op2b)
2342 tree t;
2343 gimple stmt = SSA_NAME_DEF_STMT (var);
2345 /* We can only deal with variables whose definitions are assignments. */
2346 if (!is_gimple_assign (stmt))
2347 return NULL_TREE;
2349 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2350 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2351 Then we only have to consider the simpler non-inverted cases. */
2352 if (invert)
2353 t = and_var_with_comparison_1 (stmt,
2354 invert_tree_comparison (code2, false),
2355 op2a, op2b);
2356 else
2357 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2358 return canonicalize_bool (t, invert);
2361 /* Try to simplify the OR of the ssa variable defined by the assignment
2362 STMT with the comparison specified by (OP2A CODE2 OP2B).
2363 Return NULL_TREE if we can't simplify this to a single expression. */
2365 static tree
2366 or_var_with_comparison_1 (gimple stmt,
2367 enum tree_code code2, tree op2a, tree op2b)
2369 tree var = gimple_assign_lhs (stmt);
2370 tree true_test_var = NULL_TREE;
2371 tree false_test_var = NULL_TREE;
2372 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2374 /* Check for identities like (var OR (var != 0)) => true. */
2375 if (TREE_CODE (op2a) == SSA_NAME
2376 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2378 if ((code2 == NE_EXPR && integer_zerop (op2b))
2379 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2381 true_test_var = op2a;
2382 if (var == true_test_var)
2383 return var;
2385 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2386 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2388 false_test_var = op2a;
2389 if (var == false_test_var)
2390 return boolean_true_node;
2394 /* If the definition is a comparison, recurse on it. */
2395 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2397 tree t = or_comparisons_1 (innercode,
2398 gimple_assign_rhs1 (stmt),
2399 gimple_assign_rhs2 (stmt),
2400 code2,
2401 op2a,
2402 op2b);
2403 if (t)
2404 return t;
2407 /* If the definition is an AND or OR expression, we may be able to
2408 simplify by reassociating. */
2409 if (innercode == TRUTH_AND_EXPR
2410 || innercode == TRUTH_OR_EXPR
2411 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2412 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
2414 tree inner1 = gimple_assign_rhs1 (stmt);
2415 tree inner2 = gimple_assign_rhs2 (stmt);
2416 gimple s;
2417 tree t;
2418 tree partial = NULL_TREE;
2419 bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
2421 /* Check for boolean identities that don't require recursive examination
2422 of inner1/inner2:
2423 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2424 inner1 OR (inner1 AND inner2) => inner1
2425 !inner1 OR (inner1 OR inner2) => true
2426 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2427 Likewise for similar cases involving inner2. */
2428 if (inner1 == true_test_var)
2429 return (is_or ? var : inner1);
2430 else if (inner2 == true_test_var)
2431 return (is_or ? var : inner2);
2432 else if (inner1 == false_test_var)
2433 return (is_or
2434 ? boolean_true_node
2435 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2436 else if (inner2 == false_test_var)
2437 return (is_or
2438 ? boolean_true_node
2439 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2441 /* Next, redistribute/reassociate the OR across the inner tests.
2442 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2443 if (TREE_CODE (inner1) == SSA_NAME
2444 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2445 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2446 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2447 gimple_assign_rhs1 (s),
2448 gimple_assign_rhs2 (s),
2449 code2, op2a, op2b)))
2451 /* Handle the OR case, where we are reassociating:
2452 (inner1 OR inner2) OR (op2a code2 op2b)
2453 => (t OR inner2)
2454 If the partial result t is a constant, we win. Otherwise
2455 continue on to try reassociating with the other inner test. */
2456 if (is_or)
2458 if (integer_onep (t))
2459 return boolean_true_node;
2460 else if (integer_zerop (t))
2461 return inner2;
2464 /* Handle the AND case, where we are redistributing:
2465 (inner1 AND inner2) OR (op2a code2 op2b)
2466 => (t AND (inner2 OR (op2a code op2b))) */
2467 else if (integer_zerop (t))
2468 return boolean_false_node;
2470 /* Save partial result for later. */
2471 partial = t;
2474 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2475 if (TREE_CODE (inner2) == SSA_NAME
2476 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2477 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2478 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2479 gimple_assign_rhs1 (s),
2480 gimple_assign_rhs2 (s),
2481 code2, op2a, op2b)))
2483 /* Handle the OR case, where we are reassociating:
2484 (inner1 OR inner2) OR (op2a code2 op2b)
2485 => (inner1 OR t)
2486 => (t OR partial) */
2487 if (is_or)
2489 if (integer_zerop (t))
2490 return inner1;
2491 else if (integer_onep (t))
2492 return boolean_true_node;
2493 /* If both are the same, we can apply the identity
2494 (x OR x) == x. */
2495 else if (partial && same_bool_result_p (t, partial))
2496 return t;
2499 /* Handle the AND case, where we are redistributing:
2500 (inner1 AND inner2) OR (op2a code2 op2b)
2501 => (t AND (inner1 OR (op2a code2 op2b)))
2502 => (t AND partial) */
2503 else
2505 if (integer_zerop (t))
2506 return boolean_false_node;
2507 else if (partial)
2509 /* We already got a simplification for the other
2510 operand to the redistributed AND expression. The
2511 interesting case is when at least one is true.
2512 Or, if both are the same, we can apply the identity
2513 (x AND x) == x. */
2514 if (integer_onep (partial))
2515 return t;
2516 else if (integer_onep (t))
2517 return partial;
2518 else if (same_bool_result_p (t, partial))
2519 return t;
2524 return NULL_TREE;
2527 /* Try to simplify the OR of two comparisons defined by
2528 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2529 If this can be done without constructing an intermediate value,
2530 return the resulting tree; otherwise NULL_TREE is returned.
2531 This function is deliberately asymmetric as it recurses on SSA_DEFs
2532 in the first comparison but not the second. */
2534 static tree
2535 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2536 enum tree_code code2, tree op2a, tree op2b)
2538 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2539 if (operand_equal_p (op1a, op2a, 0)
2540 && operand_equal_p (op1b, op2b, 0))
2542 tree t = combine_comparisons (UNKNOWN_LOCATION,
2543 TRUTH_ORIF_EXPR, code1, code2,
2544 boolean_type_node, op1a, op1b);
2545 if (t)
2546 return t;
2549 /* Likewise the swapped case of the above. */
2550 if (operand_equal_p (op1a, op2b, 0)
2551 && operand_equal_p (op1b, op2a, 0))
2553 tree t = combine_comparisons (UNKNOWN_LOCATION,
2554 TRUTH_ORIF_EXPR, code1,
2555 swap_tree_comparison (code2),
2556 boolean_type_node, op1a, op1b);
2557 if (t)
2558 return t;
2561 /* If both comparisons are of the same value against constants, we might
2562 be able to merge them. */
2563 if (operand_equal_p (op1a, op2a, 0)
2564 && TREE_CODE (op1b) == INTEGER_CST
2565 && TREE_CODE (op2b) == INTEGER_CST)
2567 int cmp = tree_int_cst_compare (op1b, op2b);
2569 /* If we have (op1a != op1b), we should either be able to
2570 return that or TRUE, depending on whether the constant op1b
2571 also satisfies the other comparison against op2b. */
2572 if (code1 == NE_EXPR)
2574 bool done = true;
2575 bool val;
2576 switch (code2)
2578 case EQ_EXPR: val = (cmp == 0); break;
2579 case NE_EXPR: val = (cmp != 0); break;
2580 case LT_EXPR: val = (cmp < 0); break;
2581 case GT_EXPR: val = (cmp > 0); break;
2582 case LE_EXPR: val = (cmp <= 0); break;
2583 case GE_EXPR: val = (cmp >= 0); break;
2584 default: done = false;
2586 if (done)
2588 if (val)
2589 return boolean_true_node;
2590 else
2591 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2594 /* Likewise if the second comparison is a != comparison. */
2595 else if (code2 == NE_EXPR)
2597 bool done = true;
2598 bool val;
2599 switch (code1)
2601 case EQ_EXPR: val = (cmp == 0); break;
2602 case NE_EXPR: val = (cmp != 0); break;
2603 case LT_EXPR: val = (cmp > 0); break;
2604 case GT_EXPR: val = (cmp < 0); break;
2605 case LE_EXPR: val = (cmp >= 0); break;
2606 case GE_EXPR: val = (cmp <= 0); break;
2607 default: done = false;
2609 if (done)
2611 if (val)
2612 return boolean_true_node;
2613 else
2614 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2618 /* See if an equality test is redundant with the other comparison. */
2619 else if (code1 == EQ_EXPR)
2621 bool val;
2622 switch (code2)
2624 case EQ_EXPR: val = (cmp == 0); break;
2625 case NE_EXPR: val = (cmp != 0); break;
2626 case LT_EXPR: val = (cmp < 0); break;
2627 case GT_EXPR: val = (cmp > 0); break;
2628 case LE_EXPR: val = (cmp <= 0); break;
2629 case GE_EXPR: val = (cmp >= 0); break;
2630 default:
2631 val = false;
2633 if (val)
2634 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2636 else if (code2 == EQ_EXPR)
2638 bool val;
2639 switch (code1)
2641 case EQ_EXPR: val = (cmp == 0); break;
2642 case NE_EXPR: val = (cmp != 0); break;
2643 case LT_EXPR: val = (cmp > 0); break;
2644 case GT_EXPR: val = (cmp < 0); break;
2645 case LE_EXPR: val = (cmp >= 0); break;
2646 case GE_EXPR: val = (cmp <= 0); break;
2647 default:
2648 val = false;
2650 if (val)
2651 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2654 /* Choose the less restrictive of two < or <= comparisons. */
2655 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2656 && (code2 == LT_EXPR || code2 == LE_EXPR))
2658 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2659 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2660 else
2661 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2664 /* Likewise choose the less restrictive of two > or >= comparisons. */
2665 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2666 && (code2 == GT_EXPR || code2 == GE_EXPR))
2668 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2669 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2670 else
2671 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2674 /* Check for singleton ranges. */
2675 else if (cmp == 0
2676 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2677 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2678 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2680 /* Check for less/greater pairs that don't restrict the range at all. */
2681 else if (cmp >= 0
2682 && (code1 == LT_EXPR || code1 == LE_EXPR)
2683 && (code2 == GT_EXPR || code2 == GE_EXPR))
2684 return boolean_true_node;
2685 else if (cmp <= 0
2686 && (code1 == GT_EXPR || code1 == GE_EXPR)
2687 && (code2 == LT_EXPR || code2 == LE_EXPR))
2688 return boolean_true_node;
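/* For example, (a < 5) OR (a <= 7) keeps the weaker a <= 7,
(a < 3) OR (a > 3) becomes a != 3, and (a > 3) OR (a < 5) covers
every value and folds to true. */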
2691 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2692 NAME's definition is a truth value. See if there are any simplifications
2693 that can be done against the NAME's definition. */
2694 if (TREE_CODE (op1a) == SSA_NAME
2695 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2696 && (integer_zerop (op1b) || integer_onep (op1b)))
2698 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2699 || (code1 == NE_EXPR && integer_onep (op1b)));
2700 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2701 switch (gimple_code (stmt))
2703 case GIMPLE_ASSIGN:
2704 /* Try to simplify by copy-propagating the definition. */
2705 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2707 case GIMPLE_PHI:
2708 /* If every argument to the PHI produces the same result when
2709 ORed with the second comparison, we win.
2710 Do not do this unless the type is bool since we need a bool
2711 result here anyway. */
2712 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2714 tree result = NULL_TREE;
2715 unsigned i;
2716 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2718 tree arg = gimple_phi_arg_def (stmt, i);
2720 /* If this PHI has itself as an argument, ignore it.
2721 If all the other args produce the same result,
2722 we're still OK. */
2723 if (arg == gimple_phi_result (stmt))
2724 continue;
2725 else if (TREE_CODE (arg) == INTEGER_CST)
2727 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2729 if (!result)
2730 result = boolean_true_node;
2731 else if (!integer_onep (result))
2732 return NULL_TREE;
2734 else if (!result)
2735 result = fold_build2 (code2, boolean_type_node,
2736 op2a, op2b);
2737 else if (!same_bool_comparison_p (result,
2738 code2, op2a, op2b))
2739 return NULL_TREE;
2741 else if (TREE_CODE (arg) == SSA_NAME
2742 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2744 tree temp;
2745 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2746 /* In simple cases we can look through PHI nodes,
2747 but we have to be careful with loops.
2748 See PR49073. */
2749 if (! dom_info_available_p (CDI_DOMINATORS)
2750 || gimple_bb (def_stmt) == gimple_bb (stmt)
2751 || dominated_by_p (CDI_DOMINATORS,
2752 gimple_bb (def_stmt),
2753 gimple_bb (stmt)))
2754 return NULL_TREE;
2755 temp = or_var_with_comparison (arg, invert, code2,
2756 op2a, op2b);
2757 if (!temp)
2758 return NULL_TREE;
2759 else if (!result)
2760 result = temp;
2761 else if (!same_bool_result_p (result, temp))
2762 return NULL_TREE;
2764 else
2765 return NULL_TREE;
2767 return result;
2770 default:
2771 break;
2774 return NULL_TREE;
2777 /* Try to simplify the OR of two comparisons, specified by
2778 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2779 If this can be simplified to a single expression (without requiring
2780 introducing more SSA variables to hold intermediate values),
2781 return the resulting tree. Otherwise return NULL_TREE.
2782 If the result expression is non-null, it has boolean type. */
2784 tree
2785 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2786 enum tree_code code2, tree op2a, tree op2b)
2788 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2789 if (t)
2790 return t;
2791 else
2792 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2796 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2798 Either NULL_TREE, a simplified but non-constant tree, or a constant
2799 is returned.
2801 ??? This should go into a gimple-fold-inline.h file to be eventually
2802 privatized with the single valueize function used in the various TUs
2803 to avoid the indirect function call overhead. */
2805 tree
2806 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2808 location_t loc = gimple_location (stmt);
2809 switch (gimple_code (stmt))
2811 case GIMPLE_ASSIGN:
2813 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2815 switch (get_gimple_rhs_class (subcode))
2817 case GIMPLE_SINGLE_RHS:
2819 tree rhs = gimple_assign_rhs1 (stmt);
2820 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2822 if (TREE_CODE (rhs) == SSA_NAME)
2824 /* If the RHS is an SSA_NAME, return its known constant value,
2825 if any. */
2826 return (*valueize) (rhs);
2828 /* Handle propagating invariant addresses into address
2829 operations. */
2830 else if (TREE_CODE (rhs) == ADDR_EXPR
2831 && !is_gimple_min_invariant (rhs))
2833 HOST_WIDE_INT offset;
2834 tree base;
2835 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2836 &offset,
2837 valueize);
2838 if (base
2839 && (CONSTANT_CLASS_P (base)
2840 || decl_address_invariant_p (base)))
2841 return build_invariant_address (TREE_TYPE (rhs),
2842 base, offset);
2844 else if (TREE_CODE (rhs) == CONSTRUCTOR
2845 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2846 && (CONSTRUCTOR_NELTS (rhs)
2847 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2849 unsigned i;
2850 tree val, list;
2852 list = NULL_TREE;
2853 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2855 val = (*valueize) (val);
2856 if (TREE_CODE (val) == INTEGER_CST
2857 || TREE_CODE (val) == REAL_CST
2858 || TREE_CODE (val) == FIXED_CST)
2859 list = tree_cons (NULL_TREE, val, list);
2860 else
2861 return NULL_TREE;
2864 return build_vector (TREE_TYPE (rhs), nreverse (list));
2867 if (kind == tcc_reference)
2869 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2870 || TREE_CODE (rhs) == REALPART_EXPR
2871 || TREE_CODE (rhs) == IMAGPART_EXPR)
2872 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2874 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2875 return fold_unary_loc (EXPR_LOCATION (rhs),
2876 TREE_CODE (rhs),
2877 TREE_TYPE (rhs), val);
2879 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2880 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2882 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2883 return fold_ternary_loc (EXPR_LOCATION (rhs),
2884 TREE_CODE (rhs),
2885 TREE_TYPE (rhs), val,
2886 TREE_OPERAND (rhs, 1),
2887 TREE_OPERAND (rhs, 2));
2889 else if (TREE_CODE (rhs) == MEM_REF
2890 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2892 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2893 if (TREE_CODE (val) == ADDR_EXPR
2894 && is_gimple_min_invariant (val))
2896 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2897 unshare_expr (val),
2898 TREE_OPERAND (rhs, 1));
2899 if (tem)
2900 rhs = tem;
2903 return fold_const_aggregate_ref_1 (rhs, valueize);
2905 else if (kind == tcc_declaration)
2906 return get_symbol_constant_value (rhs);
2907 return rhs;
2910 case GIMPLE_UNARY_RHS:
2912 /* Handle unary operators that can appear in GIMPLE form.
2913 Note that we know the single operand must be a constant,
2914 so this should almost always return a simplified RHS. */
2915 tree lhs = gimple_assign_lhs (stmt);
2916 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2918 /* Conversions are useless for CCP purposes if they are
2919 value-preserving. Thus the restrictions that
2920 useless_type_conversion_p places for pointer type conversions
2921 do not apply here. Substitution later will only substitute to
2922 allowed places. */
2923 if (CONVERT_EXPR_CODE_P (subcode)
2924 && POINTER_TYPE_P (TREE_TYPE (lhs))
2925 && POINTER_TYPE_P (TREE_TYPE (op0)))
2927 tree tem;
2928 /* Try to re-construct array references on-the-fly. */
2929 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2930 TREE_TYPE (op0))
2931 && ((tem = maybe_fold_offset_to_address
2932 (loc,
2933 op0, integer_zero_node, TREE_TYPE (lhs)))
2934 != NULL_TREE))
2935 return tem;
2936 return op0;
2939 return
2940 fold_unary_ignore_overflow_loc (loc, subcode,
2941 gimple_expr_type (stmt), op0);
2944 case GIMPLE_BINARY_RHS:
2946 /* Handle binary operators that can appear in GIMPLE form. */
2947 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2948 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2950 /* Translate &x + CST into an invariant form suitable for
2951 further propagation. */
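/* For example, &a p+ 8 becomes the address of a MEM_REF of a's type
at byte offset 8, an invariant that later substitution can use
directly. */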
2952 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2953 && TREE_CODE (op0) == ADDR_EXPR
2954 && TREE_CODE (op1) == INTEGER_CST)
2956 tree off = fold_convert (ptr_type_node, op1);
2957 return build_fold_addr_expr
2958 (fold_build2 (MEM_REF,
2959 TREE_TYPE (TREE_TYPE (op0)),
2960 unshare_expr (op0), off));
2963 return fold_binary_loc (loc, subcode,
2964 gimple_expr_type (stmt), op0, op1);
2967 case GIMPLE_TERNARY_RHS:
2969 /* Handle ternary operators that can appear in GIMPLE form. */
2970 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2971 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2972 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2974 return fold_ternary_loc (loc, subcode,
2975 gimple_expr_type (stmt), op0, op1, op2);
2978 default:
2979 gcc_unreachable ();
2983 case GIMPLE_CALL:
2985 tree fn;
2987 if (gimple_call_internal_p (stmt))
2988 /* No folding yet for these functions. */
2989 return NULL_TREE;
2991 fn = (*valueize) (gimple_call_fn (stmt));
2992 if (TREE_CODE (fn) == ADDR_EXPR
2993 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2994 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2996 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2997 tree call, retval;
2998 unsigned i;
2999 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3000 args[i] = (*valueize) (gimple_call_arg (stmt, i));
3001 call = build_call_array_loc (loc,
3002 gimple_call_return_type (stmt),
3003 fn, gimple_call_num_args (stmt), args);
3004 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
3005 if (retval)
3006 /* fold_call_expr wraps the result inside a NOP_EXPR. */
3007 STRIP_NOPS (retval);
3008 return retval;
3010 return NULL_TREE;
3013 default:
3014 return NULL_TREE;
3018 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
3019 Returns NULL_TREE if folding to a constant is not possible, otherwise
3020 returns a constant according to is_gimple_min_invariant. */
3022 tree
3023 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
3025 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
3026 if (res && is_gimple_min_invariant (res))
3027 return res;
3028 return NULL_TREE;
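/* A minimal sketch of a valueize callback (hypothetical, for
illustration only; lookup_lattice_value stands for whatever constant
lattice the calling pass maintains):

static tree
example_valueize (tree name)
{
tree cst = lookup_lattice_value (name);
return cst ? cst : name;
}

The callback is expected to return either a constant or the SSA name
itself, never NULL_TREE. gimple_fold_stmt_to_constant (stmt,
example_valueize) then folds STMT when all of its operands valueize
to constants. */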
3032 /* The following functions fold references using
3033 their constant initializers. */
3035 static tree fold_ctor_reference (tree type, tree ctor,
3036 unsigned HOST_WIDE_INT offset,
3037 unsigned HOST_WIDE_INT size);
3039 /* See if we can find a constructor defining the value of BASE.
3040 When BASE lies at a constant offset within a known constructor (such as
3041 base being array[40] where we know the constructor of array),
3042 BIT_OFFSET is adjusted accordingly.
3044 As a special case, return error_mark_node when constructor
3045 is not explicitly available, but it is known to be zero
3046 such as 'static const int a;'. */
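/* For example, for a MEM_REF of &a at byte offset 16, where a is a
static const array with a known initializer, *BIT_OFFSET grows by
16 * BITS_PER_UNIT and DECL_INITIAL (a) is returned; for
'static const int a;' with no initializer, error_mark_node signals a
known-zero value. */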
3047 static tree
3048 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
3049 tree (*valueize)(tree))
3051 HOST_WIDE_INT bit_offset2, size, max_size;
3052 if (TREE_CODE (base) == MEM_REF)
3054 if (!integer_zerop (TREE_OPERAND (base, 1)))
3056 if (!host_integerp (TREE_OPERAND (base, 1), 0))
3057 return NULL_TREE;
3058 *bit_offset += (mem_ref_offset (base).low
3059 * BITS_PER_UNIT);
3062 if (valueize
3063 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
3064 base = valueize (TREE_OPERAND (base, 0));
3065 if (!base || TREE_CODE (base) != ADDR_EXPR)
3066 return NULL_TREE;
3067 base = TREE_OPERAND (base, 0);
3070 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
3071 DECL_INITIAL. If BASE is a nested reference into another
3072 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
3073 the inner reference. */
3074 switch (TREE_CODE (base))
3076 case VAR_DECL:
3077 if (!const_value_known_p (base))
3078 return NULL_TREE;
3080 /* Fallthru. */
3081 case CONST_DECL:
3082 if (!DECL_INITIAL (base)
3083 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
3084 return error_mark_node;
3085 return DECL_INITIAL (base);
3087 case ARRAY_REF:
3088 case COMPONENT_REF:
3089 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
3090 if (max_size == -1 || size != max_size)
3091 return NULL_TREE;
3092 *bit_offset += bit_offset2;
3093 return get_base_constructor (base, bit_offset, valueize);
3095 case STRING_CST:
3096 case CONSTRUCTOR:
3097 return base;
3099 default:
3100 return NULL_TREE;
3104 /* CTOR is a STRING_CST. Fold a reference of type TYPE and size SIZE
3105 to the memory at bit OFFSET.
3107 We only do the simple job of folding byte accesses. */
3109 static tree
3110 fold_string_cst_ctor_reference (tree type, tree ctor,
3111 unsigned HOST_WIDE_INT offset,
3112 unsigned HOST_WIDE_INT size)
3114 if (INTEGRAL_TYPE_P (type)
3115 && (TYPE_MODE (type)
3116 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
3117 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
3118 == MODE_INT)
3119 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
3120 && size == BITS_PER_UNIT
3121 && !(offset % BITS_PER_UNIT))
3123 offset /= BITS_PER_UNIT;
3124 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
3125 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
3126 [offset]));
3127 /* Folding
3128 const char a[20]="hello";
3129 return a[10];
3131 might lead to an offset greater than the string length. In this case we
3132 know the value is either initialized to 0 or out of bounds. Return 0
3133 in both cases. */
3134 return build_zero_cst (type);
3136 return NULL_TREE;
3139 /* CTOR is a CONSTRUCTOR of an array type. Fold a reference of type TYPE and size
3140 SIZE to the memory at bit OFFSET. */
3142 static tree
3143 fold_array_ctor_reference (tree type, tree ctor,
3144 unsigned HOST_WIDE_INT offset,
3145 unsigned HOST_WIDE_INT size)
3147 unsigned HOST_WIDE_INT cnt;
3148 tree cfield, cval;
3149 double_int low_bound, elt_size;
3150 double_int index, max_index;
3151 double_int access_index;
3152 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
3153 HOST_WIDE_INT inner_offset;
3155 /* Compute low bound and elt size. */
3156 if (domain_type && TYPE_MIN_VALUE (domain_type))
3158 /* Static constructors for variably sized objects make no sense. */
3159 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
3160 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
3162 else
3163 low_bound = double_int_zero;
3164 /* Static constructors for variably sized objects make no sense. */
3165 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
3166 == INTEGER_CST);
3167 elt_size =
3168 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
3171 /* We can only handle constant-sized accesses that are known not to
3172 be larger than the size of the array element. */
3173 if (!TYPE_SIZE_UNIT (type)
3174 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
3175 || double_int_cmp (elt_size,
3176 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
3177 return NULL_TREE;
3179 /* Compute the array index we look for. */
3180 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
3181 elt_size, TRUNC_DIV_EXPR);
3182 access_index = double_int_add (access_index, low_bound);
3184 /* And offset within the access. */
3185 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
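/* For example, with 4-byte elements, a low bound of 0 and an access at
bit offset 328 (assuming 8-bit units): 328 / BITS_PER_UNIT is 41 bytes,
so access_index is 41 / 4 = 10 and inner_offset is 328 % 32 = 8 bits. */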
3187 /* See if the array field is large enough to span the whole access. We do not
3188 care to fold accesses spanning multiple array indexes. */
3189 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
3190 return NULL_TREE;
3192 index = double_int_sub (low_bound, double_int_one);
3193 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
3195 /* An array constructor might explicitly set the index, specify a range,
3196 or leave the index NULL, meaning that it is the next index after the
3197 previous one. */
3198 if (cfield)
3200 if (TREE_CODE (cfield) == INTEGER_CST)
3201 max_index = index = tree_to_double_int (cfield);
3202 else
3204 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
3205 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
3206 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
3209 else
3210 max_index = index = double_int_add (index, double_int_one);
3212 /* Do we have a match? */
3213 if (double_int_cmp (access_index, index, 1) >= 0
3214 && double_int_cmp (access_index, max_index, 1) <= 0)
3215 return fold_ctor_reference (type, cval, inner_offset, size);
3217 /* When memory is not explicitly mentioned in the constructor,
3218 it is 0 (or out of range). */
3219 return build_zero_cst (type);
3222 /* CTOR is a CONSTRUCTOR of an aggregate or vector.
3223 Fold a reference of type TYPE and size SIZE to the memory at bit OFFSET. */
3225 static tree
3226 fold_nonarray_ctor_reference (tree type, tree ctor,
3227 unsigned HOST_WIDE_INT offset,
3228 unsigned HOST_WIDE_INT size)
3230 unsigned HOST_WIDE_INT cnt;
3231 tree cfield, cval;
3233 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
3234 cval)
3236 tree byte_offset = DECL_FIELD_OFFSET (cfield);
3237 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
3238 tree field_size = DECL_SIZE (cfield);
3239 double_int bitoffset;
3240 double_int byte_offset_cst = tree_to_double_int (byte_offset);
3241 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
3242 double_int bitoffset_end;
3244 /* Variable-sized objects in static constructors make no sense,
3245 but field_size can be NULL for flexible array members. */
3246 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
3247 && TREE_CODE (byte_offset) == INTEGER_CST
3248 && (field_size != NULL_TREE
3249 ? TREE_CODE (field_size) == INTEGER_CST
3250 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
3252 /* Compute bit offset of the field. */
3253 bitoffset = double_int_add (tree_to_double_int (field_offset),
3254 double_int_mul (byte_offset_cst,
3255 bits_per_unit_cst));
3256 /* Compute bit offset where the field ends. */
3257 if (field_size != NULL_TREE)
3258 bitoffset_end = double_int_add (bitoffset,
3259 tree_to_double_int (field_size));
3260 else
3261 bitoffset_end = double_int_zero;
3263 /* Is OFFSET in the range [BITOFFSET, BITOFFSET_END)? */
3264 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) >= 0
3265 && (field_size == NULL_TREE
3266 || double_int_cmp (uhwi_to_double_int (offset),
3267 bitoffset_end, 0) < 0))
3269 double_int access_end = double_int_add (uhwi_to_double_int (offset),
3270 uhwi_to_double_int (size));
3271 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
3272 bitoffset);
3273 /* We do have overlap. Now see if the field is large enough to
3274 cover the access. Give up for accesses spanning multiple
3275 fields. */
3276 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
3277 return NULL_TREE;
3278 return fold_ctor_reference (type, cval,
3279 double_int_to_uhwi (inner_offset), size);
3282 /* When memory is not explicitly mentioned in the constructor, it is 0. */
3283 return build_zero_cst (type);
3286 /* CTOR is the value initializing memory; fold a reference of type TYPE and size SIZE
3287 to the memory at bit OFFSET. */
3289 static tree
3290 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
3291 unsigned HOST_WIDE_INT size)
3293 tree ret;
3295 /* We found the field with an exact match. */
3296 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
3297 && !offset)
3298 return canonicalize_constructor_val (ctor);
3300 /* We are at the end of the walk; see if we can view-convert the
3301 result. */
3302 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
3303 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3304 && operand_equal_p (TYPE_SIZE (type),
3305 TYPE_SIZE (TREE_TYPE (ctor)), 0))
3307 ret = canonicalize_constructor_val (ctor);
3308 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
3309 if (ret)
3310 STRIP_NOPS (ret);
3311 return ret;
3313 if (TREE_CODE (ctor) == STRING_CST)
3314 return fold_string_cst_ctor_reference (type, ctor, offset, size);
3315 if (TREE_CODE (ctor) == CONSTRUCTOR)
3318 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
3319 return fold_array_ctor_reference (type, ctor, offset, size);
3320 else
3321 return fold_nonarray_ctor_reference (type, ctor, offset, size);
3324 return NULL_TREE;
3327 /* Return the tree representing the element referenced by T if T is an
3328 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
3329 names using VALUEIZE. Return NULL_TREE otherwise. */
3331 tree
3332 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
3334 tree ctor, idx, base;
3335 HOST_WIDE_INT offset, size, max_size;
3336 tree tem;
3338 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3339 return get_symbol_constant_value (t);
3341 tem = fold_read_from_constant_string (t);
3342 if (tem)
3343 return tem;
3345 switch (TREE_CODE (t))
3347 case ARRAY_REF:
3348 case ARRAY_RANGE_REF:
3349 /* Constant indexes are handled well by get_base_constructor.
3350 Only special case variable offsets.
3351 FIXME: This code can't handle nested references with variable indexes
3352 (they will be handled only by iteration of ccp). Perhaps we can bring
3353 get_ref_base_and_extent here and make it use a valueize callback. */
3354 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3355 && valueize
3356 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3357 && host_integerp (idx, 0))
3359 tree low_bound, unit_size;
3361 /* If the resulting bit-offset is constant, track it. */
3362 if ((low_bound = array_ref_low_bound (t),
3363 host_integerp (low_bound, 0))
3364 && (unit_size = array_ref_element_size (t),
3365 host_integerp (unit_size, 1)))
3367 offset = TREE_INT_CST_LOW (idx);
3368 offset -= TREE_INT_CST_LOW (low_bound);
3369 offset *= TREE_INT_CST_LOW (unit_size);
3370 offset *= BITS_PER_UNIT;
3372 base = TREE_OPERAND (t, 0);
3373 ctor = get_base_constructor (base, &offset, valueize);
3374 /* Empty constructor. Always fold to 0. */
3375 if (ctor == error_mark_node)
3376 return build_zero_cst (TREE_TYPE (t));
3377 /* Out-of-bounds array access. Value is undefined,
3378 but don't fold. */
3379 if (offset < 0)
3380 return NULL_TREE;
3381 /* We cannot determine the ctor. */
3382 if (!ctor)
3383 return NULL_TREE;
3384 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3385 TREE_INT_CST_LOW (unit_size)
3386 * BITS_PER_UNIT);
3389 /* Fallthru. */
3391 case COMPONENT_REF:
3392 case BIT_FIELD_REF:
3393 case TARGET_MEM_REF:
3394 case MEM_REF:
3395 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3396 ctor = get_base_constructor (base, &offset, valueize);
3398 /* Empty constructor. Always fold to 0. */
3399 if (ctor == error_mark_node)
3400 return build_zero_cst (TREE_TYPE (t));
3401 /* We do not know the precise address. */
3402 if (max_size == -1 || max_size != size)
3403 return NULL_TREE;
3404 /* We cannot determine the ctor. */
3405 if (!ctor)
3406 return NULL_TREE;
3408 /* Out-of-bounds array access. Value is undefined, but don't fold. */
3409 if (offset < 0)
3410 return NULL_TREE;
3412 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
3414 case REALPART_EXPR:
3415 case IMAGPART_EXPR:
3417 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3418 if (c && TREE_CODE (c) == COMPLEX_CST)
3419 return fold_build1_loc (EXPR_LOCATION (t),
3420 TREE_CODE (t), TREE_TYPE (t), c);
3421 break;
3424 default:
3425 break;
3428 return NULL_TREE;
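/* Same as fold_const_aggregate_ref_1, but without an SSA-name valueize
callback. */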
3431 tree
3432 fold_const_aggregate_ref (tree t)
3434 return fold_const_aggregate_ref_1 (t, NULL);