gcc/gimple-fold.c
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
32 #include "target.h"
33 #include "gimple-fold.h"
  35 /* Return true when DECL can be referenced from the current unit.
36 We can get declarations that are not possible to reference for
37 various reasons:
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
  41       when they are keyed to another compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
44 set.
  45    2) In WHOPR mode devirtualization might lead to a reference
  46       to a method that was partitioned elsewhere.
47 In this case we have static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
49 declaring the body.
  50    3) COMDAT functions referred to by external vtables that
  51       we devirtualize only during the final compilation stage.
52 At this time we already decided that we will not output
53 the function body and thus we can't reference the symbol
54 directly. */
56 static bool
57 can_refer_decl_in_current_unit_p (tree decl)
59 struct varpool_node *vnode;
60 struct cgraph_node *node;
62 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
63 return true;
  64   /* The external flag is set, so we deal with a C++ reference
  65      to a static object from another file.  */
66 if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
67 && TREE_CODE (decl) == VAR_DECL)
  69      /* Just be sure it is not a bug in the front end setting
  70         flags incorrectly.  Those variables should never
  71         be finalized.  */
72 gcc_checking_assert (!(vnode = varpool_get_node (decl))
73 || !vnode->finalized);
74 return false;
  76   /* When the function is public, we can always introduce a new reference.
  77      The exception is COMDAT functions, where introducing a direct
  78      reference implies the need to include the function body in the current unit.  */
79 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
80 return true;
  81   /* We are not at the ltrans stage, so don't worry about WHOPR.
  82      Also, when still gimplifying, all referred comdat functions will be
  83      produced.  */
84 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
85 return true;
86 /* If we already output the function body, we are safe. */
87 if (TREE_ASM_WRITTEN (decl))
88 return true;
89 if (TREE_CODE (decl) == FUNCTION_DECL)
91 node = cgraph_get_node (decl);
  92      /* Check that we still have the function body and that we haven't yet
  93         decided to eliminate the offline copy of the function.  The second
  94         point is important when devirtualization happens during the final
  95         compilation stage, when making a new reference no longer causes the
  96         callee to be compiled.  */
97 if (!node || !node->analyzed || node->global.inlined_to)
98 return false;
100 else if (TREE_CODE (decl) == VAR_DECL)
102 vnode = varpool_get_node (decl);
103 if (!vnode || !vnode->finalized)
104 return false;
106 return true;
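/* Illustrative example (hypothetical) of case 1 above: for

     struct S { virtual void f (); };   -- S::f keyed to another unit

   the vtable's constructor refers to S::f, whose decl carries both
   TREE_STATIC and DECL_EXTERNAL here; for a VAR_DECL in that state
   the function returns false rather than risk an unresolvable
   reference.  */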
 109 /* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
 110    it into an acceptable form for is_gimple_min_invariant.  */
112 tree
113 canonicalize_constructor_val (tree cval)
115 STRIP_NOPS (cval);
116 if (TREE_CODE (cval) == POINTER_PLUS_EXPR)
118 tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval),
119 TREE_OPERAND (cval, 0),
120 TREE_OPERAND (cval, 1),
121 TREE_TYPE (cval));
122 if (t)
123 cval = t;
125 if (TREE_CODE (cval) == ADDR_EXPR)
127 tree base = get_base_address (TREE_OPERAND (cval, 0));
129 if (base
130 && (TREE_CODE (base) == VAR_DECL
131 || TREE_CODE (base) == FUNCTION_DECL)
132 && !can_refer_decl_in_current_unit_p (base))
133 return NULL_TREE;
134 if (base && TREE_CODE (base) == VAR_DECL)
135 add_referenced_var (base);
136 /* We never have the chance to fixup types in global initializers
137 during gimplification. Do so here. */
138 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
139 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
141 return cval;
144 /* If SYM is a constant variable with known value, return the value.
145 NULL_TREE is returned otherwise. */
147 tree
148 get_symbol_constant_value (tree sym)
150 if (const_value_known_p (sym))
152 tree val = DECL_INITIAL (sym);
153 if (val)
155 val = canonicalize_constructor_val (val);
156 if (val && is_gimple_min_invariant (val))
157 return val;
158 else
159 return NULL_TREE;
161 /* Variables declared 'const' without an initializer
162 have zero as the initializer if they may not be
163 overridden at link or run time. */
164 if (!val
165 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
166 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
167 return build_zero_cst (TREE_TYPE (sym));
170 return NULL_TREE;
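/* Example (hypothetical): given

     static const int answer = 42;

   a read of 'answer' for which const_value_known_p holds folds to the
   INTEGER_CST 42 via canonicalize_constructor_val.  A 'const' scalar
   with no initializer that cannot be overridden at link or run time
   folds to build_zero_cst of its type instead.  */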
174 /* Return true if we may propagate the address expression ADDR into the
175 dereference DEREF and cancel them. */
177 bool
178 may_propagate_address_into_dereference (tree addr, tree deref)
180 gcc_assert (TREE_CODE (deref) == MEM_REF
181 && TREE_CODE (addr) == ADDR_EXPR);
183 /* Don't propagate if ADDR's operand has incomplete type. */
184 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
185 return false;
187 /* If the address is invariant then we do not need to preserve restrict
188 qualifications. But we do need to preserve volatile qualifiers until
189 we can annotate the folded dereference itself properly. */
190 if (is_gimple_min_invariant (addr)
191 && (!TREE_THIS_VOLATILE (deref)
192 || TYPE_VOLATILE (TREE_TYPE (addr))))
193 return useless_type_conversion_p (TREE_TYPE (deref),
194 TREE_TYPE (TREE_OPERAND (addr, 0)));
196 /* Else both the address substitution and the folding must result in
197 a valid useless type conversion sequence. */
198 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
199 TREE_TYPE (addr))
200 && useless_type_conversion_p (TREE_TYPE (deref),
201 TREE_TYPE (TREE_OPERAND (addr, 0))));
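/* Example (names hypothetical): for 'int a' and ADDR = &a, propagating
   into the dereference MEM[p_1] cancels the pair to plain 'a' when the
   types convert uselessly; if the dereference is volatile but the
   pointed-to type is not, the propagation is refused so the volatile
   qualifier is not lost.  */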
205 /* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
206 BASE is an array type. OFFSET is a byte displacement.
208 LOC is the location of the original expression. */
210 static tree
211 maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
213 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
214 tree array_type, elt_type, elt_size;
215 tree domain_type;
217 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
 218      measured in units of the size of the element type) from that ARRAY_REF.
219 We can't do anything if either is variable.
221 The case we handle here is *(&A[N]+O). */
222 if (TREE_CODE (base) == ARRAY_REF)
224 tree low_bound = array_ref_low_bound (base);
226 elt_offset = TREE_OPERAND (base, 1);
227 if (TREE_CODE (low_bound) != INTEGER_CST
228 || TREE_CODE (elt_offset) != INTEGER_CST)
229 return NULL_TREE;
231 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
232 base = TREE_OPERAND (base, 0);
235 /* Ignore stupid user tricks of indexing non-array variables. */
236 array_type = TREE_TYPE (base);
237 if (TREE_CODE (array_type) != ARRAY_TYPE)
238 return NULL_TREE;
239 elt_type = TREE_TYPE (array_type);
241 /* Use signed size type for intermediate computation on the index. */
242 idx_type = ssizetype;
244 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
245 element type (so we can use the alignment if it's not constant).
246 Otherwise, compute the offset as an index by using a division. If the
247 division isn't exact, then don't do anything. */
248 elt_size = TYPE_SIZE_UNIT (elt_type);
249 if (!elt_size)
250 return NULL;
251 if (integer_zerop (offset))
253 if (TREE_CODE (elt_size) != INTEGER_CST)
254 elt_size = size_int (TYPE_ALIGN (elt_type));
256 idx = build_int_cst (idx_type, 0);
258 else
260 unsigned HOST_WIDE_INT lquo, lrem;
261 HOST_WIDE_INT hquo, hrem;
262 double_int soffset;
264 /* The final array offset should be signed, so we need
265 to sign-extend the (possibly pointer) offset here
266 and use signed division. */
267 soffset = double_int_sext (tree_to_double_int (offset),
268 TYPE_PRECISION (TREE_TYPE (offset)));
269 if (TREE_CODE (elt_size) != INTEGER_CST
270 || div_and_round_double (TRUNC_DIV_EXPR, 0,
271 soffset.low, soffset.high,
272 TREE_INT_CST_LOW (elt_size),
273 TREE_INT_CST_HIGH (elt_size),
274 &lquo, &hquo, &lrem, &hrem)
275 || lrem || hrem)
276 return NULL_TREE;
278 idx = build_int_cst_wide (idx_type, lquo, hquo);
281 /* Assume the low bound is zero. If there is a domain type, get the
282 low bound, if any, convert the index into that type, and add the
283 low bound. */
284 min_idx = build_int_cst (idx_type, 0);
285 domain_type = TYPE_DOMAIN (array_type);
286 if (domain_type)
288 idx_type = domain_type;
289 if (TYPE_MIN_VALUE (idx_type))
290 min_idx = TYPE_MIN_VALUE (idx_type);
291 else
292 min_idx = fold_convert (idx_type, min_idx);
294 if (TREE_CODE (min_idx) != INTEGER_CST)
295 return NULL_TREE;
297 elt_offset = fold_convert (idx_type, elt_offset);
300 if (!integer_zerop (min_idx))
301 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
302 if (!integer_zerop (elt_offset))
303 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
305 /* Make sure to possibly truncate late after offsetting. */
306 idx = fold_convert (idx_type, idx);
308 /* We don't want to construct access past array bounds. For example
309 char *(c[4]);
310 c[3][2];
311 should not be simplified into (*c)[14] or tree-vrp will
312 give false warnings.
313 This is only an issue for multi-dimensional arrays. */
314 if (TREE_CODE (elt_type) == ARRAY_TYPE
315 && domain_type)
317 if (TYPE_MAX_VALUE (domain_type)
318 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
319 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
320 return NULL_TREE;
321 else if (TYPE_MIN_VALUE (domain_type)
322 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
323 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
324 return NULL_TREE;
325 else if (compare_tree_int (idx, 0) < 0)
326 return NULL_TREE;
330 tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
331 SET_EXPR_LOCATION (t, loc);
332 return t;
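/* Worked example (assuming a 4-byte int): for

     int a[10];

   BASE = a and OFFSET = 8 divide exactly by TYPE_SIZE_UNIT (int) = 4,
   so the access folds to the ARRAY_REF a[2].  OFFSET = 6 leaves a
   nonzero remainder and the function returns NULL_TREE.  */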
337 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
 338    LOC is the location of the original expression.
 340    Before attempting the conversion, strip off existing ADDR_EXPRs.  */
342 tree
343 maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
344 tree orig_type)
346 tree ret;
348 STRIP_NOPS (base);
349 if (TREE_CODE (base) != ADDR_EXPR)
350 return NULL_TREE;
352 base = TREE_OPERAND (base, 0);
353 if (types_compatible_p (orig_type, TREE_TYPE (base))
354 && integer_zerop (offset))
355 return base;
357 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
358 if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
359 return ret;
360 return NULL_TREE;
363 /* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
364 LOC is the location of the original expression. */
366 tree
367 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
368 tree orig_type)
370 tree base, ret;
372 STRIP_NOPS (addr);
373 if (TREE_CODE (addr) != ADDR_EXPR)
374 return NULL_TREE;
375 base = TREE_OPERAND (addr, 0);
376 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
377 if (ret)
379 ret = build_fold_addr_expr (ret);
380 if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
381 return NULL_TREE;
382 SET_EXPR_LOCATION (ret, loc);
385 return ret;
389 /* A quaint feature extant in our address arithmetic is that there
390 can be hidden type changes here. The type of the result need
391 not be the same as the type of the input pointer.
393 What we're after here is an expression of the form
394 (T *)(&array + const)
395 where array is OP0, const is OP1, RES_TYPE is T and
396 the cast doesn't actually exist, but is implicit in the
397 type of the POINTER_PLUS_EXPR. We'd like to turn this into
398 &array[x]
399 which may be able to propagate further. */
401 tree
402 maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
404 tree ptd_type;
405 tree t;
407 /* The first operand should be an ADDR_EXPR. */
408 if (TREE_CODE (op0) != ADDR_EXPR)
409 return NULL_TREE;
410 op0 = TREE_OPERAND (op0, 0);
412 /* It had better be a constant. */
413 if (TREE_CODE (op1) != INTEGER_CST)
415 /* Or op0 should now be A[0] and the non-constant offset defined
416 via a multiplication by the array element size. */
417 if (TREE_CODE (op0) == ARRAY_REF
 418           /* As we will end up creating a variable index array access
 419              in the outermost array dimension, make sure there isn't
 420              an inner array that the index could overflow into.  */
421 && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
422 && integer_zerop (TREE_OPERAND (op0, 1))
423 && TREE_CODE (op1) == SSA_NAME)
425 gimple offset_def = SSA_NAME_DEF_STMT (op1);
426 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
427 if (!host_integerp (elsz, 1)
428 || !is_gimple_assign (offset_def))
429 return NULL_TREE;
431 /* Do not build array references of something that we can't
432 see the true number of array dimensions for. */
433 if (!DECL_P (TREE_OPERAND (op0, 0))
434 && !handled_component_p (TREE_OPERAND (op0, 0)))
435 return NULL_TREE;
437 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
438 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
439 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
440 return build_fold_addr_expr
441 (build4 (ARRAY_REF, TREE_TYPE (op0),
442 TREE_OPERAND (op0, 0),
443 gimple_assign_rhs1 (offset_def),
444 TREE_OPERAND (op0, 2),
445 TREE_OPERAND (op0, 3)));
446 else if (integer_onep (elsz)
447 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
448 return build_fold_addr_expr
449 (build4 (ARRAY_REF, TREE_TYPE (op0),
450 TREE_OPERAND (op0, 0),
451 op1,
452 TREE_OPERAND (op0, 2),
453 TREE_OPERAND (op0, 3)));
455 else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
 456            /* Ditto.  */
457 && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
458 && TREE_CODE (op1) == SSA_NAME)
460 gimple offset_def = SSA_NAME_DEF_STMT (op1);
461 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
462 if (!host_integerp (elsz, 1)
463 || !is_gimple_assign (offset_def))
464 return NULL_TREE;
466 /* Do not build array references of something that we can't
467 see the true number of array dimensions for. */
468 if (!DECL_P (op0)
469 && !handled_component_p (op0))
470 return NULL_TREE;
472 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
473 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
474 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
475 return build_fold_addr_expr
476 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
477 op0, gimple_assign_rhs1 (offset_def),
478 integer_zero_node, NULL_TREE));
479 else if (integer_onep (elsz)
480 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
481 return build_fold_addr_expr
482 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
483 op0, op1,
484 integer_zero_node, NULL_TREE));
487 return NULL_TREE;
490 /* If the first operand is an ARRAY_REF, expand it so that we can fold
491 the offset into it. */
492 while (TREE_CODE (op0) == ARRAY_REF)
494 tree array_obj = TREE_OPERAND (op0, 0);
495 tree array_idx = TREE_OPERAND (op0, 1);
496 tree elt_type = TREE_TYPE (op0);
497 tree elt_size = TYPE_SIZE_UNIT (elt_type);
498 tree min_idx;
500 if (TREE_CODE (array_idx) != INTEGER_CST)
501 break;
502 if (TREE_CODE (elt_size) != INTEGER_CST)
503 break;
505 /* Un-bias the index by the min index of the array type. */
506 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
507 if (min_idx)
509 min_idx = TYPE_MIN_VALUE (min_idx);
510 if (min_idx)
512 if (TREE_CODE (min_idx) != INTEGER_CST)
513 break;
515 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
516 if (!integer_zerop (min_idx))
517 array_idx = int_const_binop (MINUS_EXPR, array_idx,
518 min_idx, 0);
522 /* Convert the index to a byte offset. */
523 array_idx = fold_convert (sizetype, array_idx);
524 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
526 /* Update the operands for the next round, or for folding. */
527 op1 = int_const_binop (PLUS_EXPR,
528 array_idx, op1, 0);
529 op0 = array_obj;
532 ptd_type = TREE_TYPE (res_type);
533 /* If we want a pointer to void, reconstruct the reference from the
534 array element type. A pointer to that can be trivially converted
535 to void *. This happens as we fold (void *)(ptr p+ off). */
536 if (VOID_TYPE_P (ptd_type)
537 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
538 ptd_type = TREE_TYPE (TREE_TYPE (op0));
540 /* At which point we can try some of the same things as for indirects. */
541 t = maybe_fold_offset_to_array_ref (loc, op0, op1);
542 if (t)
544 t = build_fold_addr_expr (t);
545 if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
546 return NULL_TREE;
547 SET_EXPR_LOCATION (t, loc);
550 return t;
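/* Worked example (assuming a 4-byte int, SSA names hypothetical):
   for

     int a[4];
     p_1 = &a p+ 12;

   OP0 is &a and OP1 is 12; the offset divides evenly by the element
   size, so the POINTER_PLUS_EXPR folds to &a[3], which later passes
   may propagate further.  */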
553 /* Subroutine of fold_stmt. We perform several simplifications of the
554 memory reference tree EXPR and make sure to re-gimplify them properly
555 after propagation of constant addresses. IS_LHS is true if the
556 reference is supposed to be an lvalue. */
558 static tree
559 maybe_fold_reference (tree expr, bool is_lhs)
561 tree *t = &expr;
562 tree result;
564 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
565 || TREE_CODE (expr) == REALPART_EXPR
566 || TREE_CODE (expr) == IMAGPART_EXPR)
567 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
568 return fold_unary_loc (EXPR_LOCATION (expr),
569 TREE_CODE (expr),
570 TREE_TYPE (expr),
571 TREE_OPERAND (expr, 0));
572 else if (TREE_CODE (expr) == BIT_FIELD_REF
573 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
574 return fold_ternary_loc (EXPR_LOCATION (expr),
575 TREE_CODE (expr),
576 TREE_TYPE (expr),
577 TREE_OPERAND (expr, 0),
578 TREE_OPERAND (expr, 1),
579 TREE_OPERAND (expr, 2));
581 while (handled_component_p (*t))
582 t = &TREE_OPERAND (*t, 0);
 584   /* Canonicalize a MEM_REF's invariant address operand.  Do this first
 585      to avoid feeding non-canonical MEM_REFs elsewhere.  */
586 if (TREE_CODE (*t) == MEM_REF
587 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
589 bool volatile_p = TREE_THIS_VOLATILE (*t);
590 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
591 TREE_OPERAND (*t, 0),
592 TREE_OPERAND (*t, 1));
593 if (tem)
595 TREE_THIS_VOLATILE (tem) = volatile_p;
596 *t = tem;
597 tem = maybe_fold_reference (expr, is_lhs);
598 if (tem)
599 return tem;
600 return expr;
604 if (!is_lhs
605 && (result = fold_const_aggregate_ref (expr))
606 && is_gimple_min_invariant (result))
607 return result;
609 /* Fold back MEM_REFs to reference trees. */
610 if (TREE_CODE (*t) == MEM_REF
611 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
612 && integer_zerop (TREE_OPERAND (*t, 1))
613 && (TREE_THIS_VOLATILE (*t)
614 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
615 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
616 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
617 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
618 /* We have to look out here to not drop a required conversion
619 from the rhs to the lhs if is_lhs, but we don't have the
620 rhs here to verify that. Thus require strict type
621 compatibility. */
622 && types_compatible_p (TREE_TYPE (*t),
623 TREE_TYPE (TREE_OPERAND
624 (TREE_OPERAND (*t, 0), 0))))
626 tree tem;
627 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
628 tem = maybe_fold_reference (expr, is_lhs);
629 if (tem)
630 return tem;
631 return expr;
633 else if (TREE_CODE (*t) == TARGET_MEM_REF)
635 tree tem = maybe_fold_tmr (*t);
636 if (tem)
638 *t = tem;
639 tem = maybe_fold_reference (expr, is_lhs);
640 if (tem)
641 return tem;
642 return expr;
646 return NULL_TREE;
650 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
651 replacement rhs for the statement or NULL_TREE if no simplification
652 could be made. It is assumed that the operands have been previously
653 folded. */
655 static tree
656 fold_gimple_assign (gimple_stmt_iterator *si)
658 gimple stmt = gsi_stmt (*si);
659 enum tree_code subcode = gimple_assign_rhs_code (stmt);
660 location_t loc = gimple_location (stmt);
662 tree result = NULL_TREE;
664 switch (get_gimple_rhs_class (subcode))
666 case GIMPLE_SINGLE_RHS:
668 tree rhs = gimple_assign_rhs1 (stmt);
670 /* Try to fold a conditional expression. */
671 if (TREE_CODE (rhs) == COND_EXPR)
673 tree op0 = COND_EXPR_COND (rhs);
674 tree tem;
675 bool set = false;
676 location_t cond_loc = EXPR_LOCATION (rhs);
678 if (COMPARISON_CLASS_P (op0))
680 fold_defer_overflow_warnings ();
681 tem = fold_binary_loc (cond_loc,
682 TREE_CODE (op0), TREE_TYPE (op0),
683 TREE_OPERAND (op0, 0),
684 TREE_OPERAND (op0, 1));
685 /* This is actually a conditional expression, not a GIMPLE
686 conditional statement, however, the valid_gimple_rhs_p
687 test still applies. */
688 set = (tem && is_gimple_condexpr (tem)
689 && valid_gimple_rhs_p (tem));
690 fold_undefer_overflow_warnings (set, stmt, 0);
692 else if (is_gimple_min_invariant (op0))
694 tem = op0;
695 set = true;
697 else
698 return NULL_TREE;
700 if (set)
701 result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
702 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
705 else if (REFERENCE_CLASS_P (rhs))
706 return maybe_fold_reference (rhs, false);
708 else if (TREE_CODE (rhs) == ADDR_EXPR)
710 tree ref = TREE_OPERAND (rhs, 0);
711 tree tem = maybe_fold_reference (ref, true);
712 if (tem
713 && TREE_CODE (tem) == MEM_REF
714 && integer_zerop (TREE_OPERAND (tem, 1)))
715 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
716 else if (tem)
717 result = fold_convert (TREE_TYPE (rhs),
718 build_fold_addr_expr_loc (loc, tem));
719 else if (TREE_CODE (ref) == MEM_REF
720 && integer_zerop (TREE_OPERAND (ref, 1)))
721 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
724 else if (TREE_CODE (rhs) == CONSTRUCTOR
725 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
726 && (CONSTRUCTOR_NELTS (rhs)
727 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
729 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
730 unsigned i;
731 tree val;
733 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
734 if (TREE_CODE (val) != INTEGER_CST
735 && TREE_CODE (val) != REAL_CST
736 && TREE_CODE (val) != FIXED_CST)
737 return NULL_TREE;
739 return build_vector_from_ctor (TREE_TYPE (rhs),
740 CONSTRUCTOR_ELTS (rhs));
743 else if (DECL_P (rhs))
744 return unshare_expr (get_symbol_constant_value (rhs));
746 /* If we couldn't fold the RHS, hand over to the generic
747 fold routines. */
748 if (result == NULL_TREE)
749 result = fold (rhs);
751 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
752 that may have been added by fold, and "useless" type
753 conversions that might now be apparent due to propagation. */
754 STRIP_USELESS_TYPE_CONVERSION (result);
756 if (result != rhs && valid_gimple_rhs_p (result))
757 return result;
759 return NULL_TREE;
761 break;
763 case GIMPLE_UNARY_RHS:
765 tree rhs = gimple_assign_rhs1 (stmt);
767 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
768 if (result)
770 /* If the operation was a conversion do _not_ mark a
771 resulting constant with TREE_OVERFLOW if the original
772 constant was not. These conversions have implementation
773 defined behavior and retaining the TREE_OVERFLOW flag
774 here would confuse later passes such as VRP. */
775 if (CONVERT_EXPR_CODE_P (subcode)
776 && TREE_CODE (result) == INTEGER_CST
777 && TREE_CODE (rhs) == INTEGER_CST)
778 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
780 STRIP_USELESS_TYPE_CONVERSION (result);
781 if (valid_gimple_rhs_p (result))
782 return result;
784 else if (CONVERT_EXPR_CODE_P (subcode)
785 && POINTER_TYPE_P (gimple_expr_type (stmt))
786 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
788 tree type = gimple_expr_type (stmt);
789 tree t = maybe_fold_offset_to_address (loc,
790 gimple_assign_rhs1 (stmt),
791 integer_zero_node, type);
792 if (t)
793 return t;
796 break;
798 case GIMPLE_BINARY_RHS:
799 /* Try to fold pointer addition. */
800 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
802 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
803 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
805 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
806 if (!useless_type_conversion_p
807 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
808 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
810 result = maybe_fold_stmt_addition (gimple_location (stmt),
811 type,
812 gimple_assign_rhs1 (stmt),
813 gimple_assign_rhs2 (stmt));
816 if (!result)
817 result = fold_binary_loc (loc, subcode,
818 TREE_TYPE (gimple_assign_lhs (stmt)),
819 gimple_assign_rhs1 (stmt),
820 gimple_assign_rhs2 (stmt));
822 if (result)
824 STRIP_USELESS_TYPE_CONVERSION (result);
825 if (valid_gimple_rhs_p (result))
826 return result;
828 /* Fold might have produced non-GIMPLE, so if we trust it blindly
829 we lose canonicalization opportunities. Do not go again
830 through fold here though, or the same non-GIMPLE will be
831 produced. */
832 if (commutative_tree_code (subcode)
833 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
834 gimple_assign_rhs2 (stmt), false))
835 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
836 gimple_assign_rhs2 (stmt),
837 gimple_assign_rhs1 (stmt));
839 break;
841 case GIMPLE_TERNARY_RHS:
842 result = fold_ternary_loc (loc, subcode,
843 TREE_TYPE (gimple_assign_lhs (stmt)),
844 gimple_assign_rhs1 (stmt),
845 gimple_assign_rhs2 (stmt),
846 gimple_assign_rhs3 (stmt));
848 if (result)
850 STRIP_USELESS_TYPE_CONVERSION (result);
851 if (valid_gimple_rhs_p (result))
852 return result;
854 /* Fold might have produced non-GIMPLE, so if we trust it blindly
855 we lose canonicalization opportunities. Do not go again
856 through fold here though, or the same non-GIMPLE will be
857 produced. */
858 if (commutative_ternary_tree_code (subcode)
859 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
860 gimple_assign_rhs2 (stmt), false))
861 return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
862 gimple_assign_rhs2 (stmt),
863 gimple_assign_rhs1 (stmt),
864 gimple_assign_rhs3 (stmt));
866 break;
868 case GIMPLE_INVALID_RHS:
869 gcc_unreachable ();
872 return NULL_TREE;
875 /* Attempt to fold a conditional statement. Return true if any changes were
876 made. We only attempt to fold the condition expression, and do not perform
877 any transformation that would require alteration of the cfg. It is
878 assumed that the operands have been previously folded. */
880 static bool
881 fold_gimple_cond (gimple stmt)
883 tree result = fold_binary_loc (gimple_location (stmt),
884 gimple_cond_code (stmt),
885 boolean_type_node,
886 gimple_cond_lhs (stmt),
887 gimple_cond_rhs (stmt));
889 if (result)
891 STRIP_USELESS_TYPE_CONVERSION (result);
892 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
894 gimple_cond_set_condition_from_tree (stmt, result);
895 return true;
899 return false;
902 /* Convert EXPR into a GIMPLE value suitable for substitution on the
903 RHS of an assignment. Insert the necessary statements before
 904    iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
 905    is replaced.  If the call is expected to produce a result, then it
906 is replaced by an assignment of the new RHS to the result variable.
907 If the result is to be ignored, then the call is replaced by a
908 GIMPLE_NOP. A proper VDEF chain is retained by making the first
909 VUSE and the last VDEF of the whole sequence be the same as the replaced
910 statement and using new SSA names for stores in between. */
912 void
913 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
915 tree lhs;
916 tree tmp = NULL_TREE; /* Silence warning. */
917 gimple stmt, new_stmt;
918 gimple_stmt_iterator i;
919 gimple_seq stmts = gimple_seq_alloc();
920 struct gimplify_ctx gctx;
921 gimple last = NULL;
922 gimple laststore = NULL;
923 tree reaching_vuse;
925 stmt = gsi_stmt (*si_p);
927 gcc_assert (is_gimple_call (stmt));
929 lhs = gimple_call_lhs (stmt);
930 reaching_vuse = gimple_vuse (stmt);
932 push_gimplify_context (&gctx);
934 if (lhs == NULL_TREE)
936 gimplify_and_add (expr, &stmts);
937 /* We can end up with folding a memcpy of an empty class assignment
938 which gets optimized away by C++ gimplification. */
939 if (gimple_seq_empty_p (stmts))
941 pop_gimplify_context (NULL);
942 if (gimple_in_ssa_p (cfun))
944 unlink_stmt_vdef (stmt);
945 release_defs (stmt);
947 gsi_remove (si_p, true);
948 return;
951 else
952 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
954 pop_gimplify_context (NULL);
956 if (gimple_has_location (stmt))
957 annotate_all_with_location (stmts, gimple_location (stmt));
959 /* The replacement can expose previously unreferenced variables. */
960 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
962 if (last)
964 gsi_insert_before (si_p, last, GSI_NEW_STMT);
965 gsi_next (si_p);
967 new_stmt = gsi_stmt (i);
968 if (gimple_in_ssa_p (cfun))
970 find_new_referenced_vars (new_stmt);
971 mark_symbols_for_renaming (new_stmt);
 973      /* If the new statement has a VUSE, update it with the exact SSA name
 974         we know will reach this one.  */
975 if (gimple_vuse (new_stmt))
977 /* If we've also seen a previous store create a new VDEF for
978 the latter one, and make that the new reaching VUSE. */
979 if (laststore)
981 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
982 gimple_set_vdef (laststore, reaching_vuse);
983 update_stmt (laststore);
984 laststore = NULL;
986 gimple_set_vuse (new_stmt, reaching_vuse);
987 gimple_set_modified (new_stmt, true);
989 if (gimple_assign_single_p (new_stmt)
990 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
992 laststore = new_stmt;
994 last = new_stmt;
997 if (lhs == NULL_TREE)
 999       /* If we replace a call without an LHS that has a VDEF, and our new
1000         sequence ends with a store, we must make that store have the same
1001         VDEF in order not to break the sequencing.  This can happen
1002         for instance when folding memcpy calls into assignments.  */
1003 if (gimple_vdef (stmt) && laststore)
1005 gimple_set_vdef (laststore, gimple_vdef (stmt));
1006 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1007 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
1008 update_stmt (laststore);
1010 else if (gimple_in_ssa_p (cfun))
1012 unlink_stmt_vdef (stmt);
1013 release_defs (stmt);
1015 new_stmt = last;
1017 else
1019 if (last)
1021 gsi_insert_before (si_p, last, GSI_NEW_STMT);
1022 gsi_next (si_p);
1024 if (laststore && is_gimple_reg (lhs))
1026 gimple_set_vdef (laststore, gimple_vdef (stmt));
1027 update_stmt (laststore);
1028 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1029 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
1030 laststore = NULL;
1032 else if (laststore)
1034 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
1035 gimple_set_vdef (laststore, reaching_vuse);
1036 update_stmt (laststore);
1037 laststore = NULL;
1039 new_stmt = gimple_build_assign (lhs, tmp);
1040 if (!is_gimple_reg (tmp))
1041 gimple_set_vuse (new_stmt, reaching_vuse);
1042 if (!is_gimple_reg (lhs))
1044 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1045 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1046 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
1048 else if (reaching_vuse == gimple_vuse (stmt))
1049 unlink_stmt_vdef (stmt);
1052 gimple_set_location (new_stmt, gimple_location (stmt));
1053 gsi_replace (si_p, new_stmt, false);
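/* Example of the virtual operand bookkeeping above (SSA names
   hypothetical): when

     # .MEM_3 = VDEF <.MEM_2>
     memcpy (&d, &s, 8);

   is replaced by the assignment d = s, the new store keeps .MEM_2 as
   its VUSE and .MEM_3 as its VDEF, so statements downstream of the
   old call see an unchanged virtual use-def chain.  */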
1056 /* Return the string length, maximum string length or maximum value of
1057 ARG in LENGTH.
1058 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1059 is not NULL and, for TYPE == 0, its value is not equal to the length
1060    we determine, or if we are unable to determine the length or value,
1061 return false. VISITED is a bitmap of visited variables.
1062 TYPE is 0 if string length should be returned, 1 for maximum string
1063 length and 2 for maximum value ARG can have. */
1065 static bool
1066 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
1068 tree var, val;
1069 gimple def_stmt;
1071 if (TREE_CODE (arg) != SSA_NAME)
1073 if (TREE_CODE (arg) == COND_EXPR)
1074 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
1075 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
1076 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1077 else if (TREE_CODE (arg) == ADDR_EXPR
1078 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1079 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1081 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1082 if (TREE_CODE (aop0) == INDIRECT_REF
1083 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1084 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1085 length, visited, type);
1088 if (type == 2)
1090 val = arg;
1091 if (TREE_CODE (val) != INTEGER_CST
1092 || tree_int_cst_sgn (val) < 0)
1093 return false;
1095 else
1096 val = c_strlen (arg, 1);
1097 if (!val)
1098 return false;
1100 if (*length)
1102 if (type > 0)
1104 if (TREE_CODE (*length) != INTEGER_CST
1105 || TREE_CODE (val) != INTEGER_CST)
1106 return false;
1108 if (tree_int_cst_lt (*length, val))
1109 *length = val;
1110 return true;
1112 else if (simple_cst_equal (val, *length) != 1)
1113 return false;
1116 *length = val;
1117 return true;
1120 /* If we were already here, break the infinite cycle. */
1121 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
1122 return true;
1124 var = arg;
1125 def_stmt = SSA_NAME_DEF_STMT (var);
1127 switch (gimple_code (def_stmt))
1129 case GIMPLE_ASSIGN:
1130 /* The RHS of the statement defining VAR must either have a
1131 constant length or come from another SSA_NAME with a constant
1132 length. */
1133 if (gimple_assign_single_p (def_stmt)
1134 || gimple_assign_unary_nop_p (def_stmt))
1136 tree rhs = gimple_assign_rhs1 (def_stmt);
1137 return get_maxval_strlen (rhs, length, visited, type);
1139 return false;
1141 case GIMPLE_PHI:
1143 /* All the arguments of the PHI node must have the same constant
1144 length. */
1145 unsigned i;
1147 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1149 tree arg = gimple_phi_arg (def_stmt, i)->def;
1151 /* If this PHI has itself as an argument, we cannot
1152 determine the string length of this argument. However,
1153 if we can find a constant string length for the other
1154 PHI args then we can still be sure that this is a
1155 constant string length. So be optimistic and just
1156 continue with the next argument. */
1157 if (arg == gimple_phi_result (def_stmt))
1158 continue;
1160 if (!get_maxval_strlen (arg, length, visited, type))
1161 return false;
1164 return true;
1166 default:
1167 return false;
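/* Example of the PHI case (names hypothetical): for

     s_3 = PHI <"foo"(bb1), "bar"(bb2)>

   both arguments have string length 3, so with TYPE == 0 *LENGTH is
   set to 3.  If the lengths disagreed, the walk would fail for
   TYPE == 0 but could still record the maximum for TYPE == 1.  */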
1172 /* Fold builtin call in statement STMT. Returns a simplified tree.
1173 We may return a non-constant expression, including another call
1174 to a different function and with different arguments, e.g.,
1175 substituting memcpy for strcpy when the string length is known.
1176 Note that some builtins expand into inline code that may not
1177 be valid in GIMPLE. Callers must take care. */
1179 tree
1180 gimple_fold_builtin (gimple stmt)
1182 tree result, val[3];
1183 tree callee, a;
1184 int arg_idx, type;
1185 bitmap visited;
1186 bool ignore;
1187 int nargs;
1188 location_t loc = gimple_location (stmt);
1190 gcc_assert (is_gimple_call (stmt));
1192 ignore = (gimple_call_lhs (stmt) == NULL);
1194 /* First try the generic builtin folder. If that succeeds, return the
1195 result directly. */
1196 result = fold_call_stmt (stmt, ignore);
1197 if (result)
1199 if (ignore)
1200 STRIP_NOPS (result);
1201 return result;
1204 /* Ignore MD builtins. */
1205 callee = gimple_call_fndecl (stmt);
1206 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
1207 return NULL_TREE;
1209 /* If the builtin could not be folded, and it has no argument list,
1210 we're done. */
1211 nargs = gimple_call_num_args (stmt);
1212 if (nargs == 0)
1213 return NULL_TREE;
1215   /* Limit the work to builtins we know how to simplify.  */
1216 switch (DECL_FUNCTION_CODE (callee))
1218 case BUILT_IN_STRLEN:
1219 case BUILT_IN_FPUTS:
1220 case BUILT_IN_FPUTS_UNLOCKED:
1221 arg_idx = 0;
1222 type = 0;
1223 break;
1224 case BUILT_IN_STRCPY:
1225 case BUILT_IN_STRNCPY:
1226 arg_idx = 1;
1227 type = 0;
1228 break;
1229 case BUILT_IN_MEMCPY_CHK:
1230 case BUILT_IN_MEMPCPY_CHK:
1231 case BUILT_IN_MEMMOVE_CHK:
1232 case BUILT_IN_MEMSET_CHK:
1233 case BUILT_IN_STRNCPY_CHK:
1234 arg_idx = 2;
1235 type = 2;
1236 break;
1237 case BUILT_IN_STRCPY_CHK:
1238 case BUILT_IN_STPCPY_CHK:
1239 arg_idx = 1;
1240 type = 1;
1241 break;
1242 case BUILT_IN_SNPRINTF_CHK:
1243 case BUILT_IN_VSNPRINTF_CHK:
1244 arg_idx = 1;
1245 type = 2;
1246 break;
1247 default:
1248 return NULL_TREE;
1251 if (arg_idx >= nargs)
1252 return NULL_TREE;
1254 /* Try to use the dataflow information gathered by the CCP process. */
1255 visited = BITMAP_ALLOC (NULL);
1256 bitmap_clear (visited);
1258 memset (val, 0, sizeof (val));
1259 a = gimple_call_arg (stmt, arg_idx);
1260 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
1261 val[arg_idx] = NULL_TREE;
1263 BITMAP_FREE (visited);
1265 result = NULL_TREE;
1266 switch (DECL_FUNCTION_CODE (callee))
1268 case BUILT_IN_STRLEN:
1269 if (val[0] && nargs == 1)
1271 tree new_val =
1272 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
1274 /* If the result is not a valid gimple value, or not a cast
1275 of a valid gimple value, then we cannot use the result. */
1276 if (is_gimple_val (new_val)
1277 || (CONVERT_EXPR_P (new_val)
1278 && is_gimple_val (TREE_OPERAND (new_val, 0))))
1279 return new_val;
1281 break;
1283 case BUILT_IN_STRCPY:
1284 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
1285 result = fold_builtin_strcpy (loc, callee,
1286 gimple_call_arg (stmt, 0),
1287 gimple_call_arg (stmt, 1),
1288 val[1]);
1289 break;
1291 case BUILT_IN_STRNCPY:
1292 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1293 result = fold_builtin_strncpy (loc, callee,
1294 gimple_call_arg (stmt, 0),
1295 gimple_call_arg (stmt, 1),
1296 gimple_call_arg (stmt, 2),
1297 val[1]);
1298 break;
1300 case BUILT_IN_FPUTS:
1301 if (nargs == 2)
1302 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1303 gimple_call_arg (stmt, 1),
1304 ignore, false, val[0]);
1305 break;
1307 case BUILT_IN_FPUTS_UNLOCKED:
1308 if (nargs == 2)
1309 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1310 gimple_call_arg (stmt, 1),
1311 ignore, true, val[0]);
1312 break;
1314 case BUILT_IN_MEMCPY_CHK:
1315 case BUILT_IN_MEMPCPY_CHK:
1316 case BUILT_IN_MEMMOVE_CHK:
1317 case BUILT_IN_MEMSET_CHK:
1318 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1319 result = fold_builtin_memory_chk (loc, callee,
1320 gimple_call_arg (stmt, 0),
1321 gimple_call_arg (stmt, 1),
1322 gimple_call_arg (stmt, 2),
1323 gimple_call_arg (stmt, 3),
1324 val[2], ignore,
1325 DECL_FUNCTION_CODE (callee));
1326 break;
1328 case BUILT_IN_STRCPY_CHK:
1329 case BUILT_IN_STPCPY_CHK:
1330 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1331 result = fold_builtin_stxcpy_chk (loc, callee,
1332 gimple_call_arg (stmt, 0),
1333 gimple_call_arg (stmt, 1),
1334 gimple_call_arg (stmt, 2),
1335 val[1], ignore,
1336 DECL_FUNCTION_CODE (callee));
1337 break;
1339 case BUILT_IN_STRNCPY_CHK:
1340 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1341 result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
1342 gimple_call_arg (stmt, 1),
1343 gimple_call_arg (stmt, 2),
1344 gimple_call_arg (stmt, 3),
1345 val[2]);
1346 break;
1348 case BUILT_IN_SNPRINTF_CHK:
1349 case BUILT_IN_VSNPRINTF_CHK:
1350 if (val[1] && is_gimple_val (val[1]))
1351 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1352 DECL_FUNCTION_CODE (callee));
1353 break;
1355 default:
1356 gcc_unreachable ();
1359 if (result && ignore)
1360 result = fold_ignored_result (result);
1361 return result;
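/* Example (hypothetical): for

     strcpy (dst, src);

   where get_maxval_strlen determines that strlen (src) == 3, the call
   folds via fold_builtin_strcpy into

     memcpy (dst, src, 4);

   copying the terminating NUL as well -- one of the non-constant
   results mentioned in the function comment above.  */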
1364 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
1365    is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
1366    KNOWN_BINFO carries the binfo describing the true type of
1367    OBJ_TYPE_REF_OBJECT(REF).  If a call to the function must be accompanied
1368    by a 'this' adjustment, the constant which should be added to the this
1369    pointer is stored in *DELTA.  If REFUSE_THUNKS is true, return NULL if the function
1370 is a thunk (other than a this adjustment which is dealt with by DELTA). */
1372 tree
1373 gimple_get_virt_mehtod_for_binfo (HOST_WIDE_INT token, tree known_binfo,
1374 tree *delta, bool refuse_thunks)
1376 HOST_WIDE_INT i;
1377 tree v, fndecl;
1378 struct cgraph_node *node;
1380 v = BINFO_VIRTUALS (known_binfo);
1381   /* If there are no virtual methods, leave the OBJ_TYPE_REF alone.  */
1382 if (!v)
1383 return NULL_TREE;
1384 i = 0;
1385 while (i != token)
1387 i += (TARGET_VTABLE_USES_DESCRIPTORS
1388 ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
1389 v = TREE_CHAIN (v);
1392 fndecl = TREE_VALUE (v);
1393 node = cgraph_get_node_or_alias (fndecl);
1394 if (refuse_thunks
1395 && (!node
1396 /* Bail out if it is a thunk declaration. Since simple this_adjusting
1397 thunks are represented by a constant in TREE_PURPOSE of items in
1398           BINFO_VIRTUALS, this is a more complicated type which we cannot
1399           handle yet.
1401 FIXME: Remove the following condition once we are able to represent
1402 thunk information on call graph edges. */
1403 || (node->same_body_alias && node->thunk.thunk_p)))
1404 return NULL_TREE;
1406   /* When the cgraph node is missing and the function is not public, we cannot
1407      devirtualize.  This can happen in WHOPR when the actual method
1408      ends up in another partition, because we found the devirtualization
1409      possibility too late.  */
1410 if (!can_refer_decl_in_current_unit_p (TREE_VALUE (v)))
1411 return NULL_TREE;
1413 *delta = TREE_PURPOSE (v);
1414 gcc_checking_assert (host_integerp (*delta, 0));
1415 return fndecl;
1418 /* Generate code adjusting the first parameter of a call statement determined
1419 by GSI by DELTA. */
1421 void
1422 gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
1424 gimple call_stmt = gsi_stmt (*gsi);
1425 tree parm, tmp;
1426 gimple new_stmt;
1428 delta = fold_convert (sizetype, delta);
1429 gcc_assert (gimple_call_num_args (call_stmt) >= 1);
1430 parm = gimple_call_arg (call_stmt, 0);
1431 gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
1432 tmp = create_tmp_var (TREE_TYPE (parm), NULL);
1433 add_referenced_var (tmp);
1435 tmp = make_ssa_name (tmp, NULL);
1436 new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
1437 SSA_NAME_DEF_STMT (tmp) = new_stmt;
1438 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1439 gimple_call_set_arg (call_stmt, 0, tmp);
1442 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1443 The statement may be replaced by another statement, e.g., if the call
1444 simplifies to a constant value. Return true if any changes were made.
1445 It is assumed that the operands have been previously folded. */
1447 bool
1448 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1450 gimple stmt = gsi_stmt (*gsi);
1452 tree callee = gimple_call_fndecl (stmt);
1454 /* Check for builtins that CCP can handle using information not
1455 available in the generic fold routines. */
1456 if (!inplace && callee && DECL_BUILT_IN (callee))
1458 tree result = gimple_fold_builtin (stmt);
1460 if (result)
1462 if (!update_call_from_tree (gsi, result))
1463 gimplify_and_update_call_from_tree (gsi, result);
1464 return true;
1467 return false;
1470 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1471    distinguishes the two cases.  */
1473 static bool
1474 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1476 bool changed = false;
1477 gimple stmt = gsi_stmt (*gsi);
1478 unsigned i;
1480 /* Fold the main computation performed by the statement. */
1481 switch (gimple_code (stmt))
1483 case GIMPLE_ASSIGN:
1485 unsigned old_num_ops = gimple_num_ops (stmt);
1486 tree new_rhs = fold_gimple_assign (gsi);
1487 tree lhs = gimple_assign_lhs (stmt);
1488 if (new_rhs
1489 && !useless_type_conversion_p (TREE_TYPE (lhs),
1490 TREE_TYPE (new_rhs)))
1491 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1492 if (new_rhs
1493 && (!inplace
1494 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1496 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1497 changed = true;
1499 break;
1502 case GIMPLE_COND:
1503 changed |= fold_gimple_cond (stmt);
1504 break;
1506 case GIMPLE_CALL:
1507 /* Fold *& in call arguments. */
1508 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1509 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1511 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1512 if (tmp)
1514 gimple_call_set_arg (stmt, i, tmp);
1515 changed = true;
1518 changed |= gimple_fold_call (gsi, inplace);
1519 break;
1521 case GIMPLE_ASM:
1522 /* Fold *& in asm operands. */
1523 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1525 tree link = gimple_asm_output_op (stmt, i);
1526 tree op = TREE_VALUE (link);
1527 if (REFERENCE_CLASS_P (op)
1528 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1530 TREE_VALUE (link) = op;
1531 changed = true;
1534 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1536 tree link = gimple_asm_input_op (stmt, i);
1537 tree op = TREE_VALUE (link);
1538 if (REFERENCE_CLASS_P (op)
1539 && (op = maybe_fold_reference (op, false)) != NULL_TREE)
1541 TREE_VALUE (link) = op;
1542 changed = true;
1545 break;
1547 case GIMPLE_DEBUG:
1548 if (gimple_debug_bind_p (stmt))
1550 tree val = gimple_debug_bind_get_value (stmt);
1551 if (val
1552 && REFERENCE_CLASS_P (val))
1554 tree tem = maybe_fold_reference (val, false);
1555 if (tem)
1557 gimple_debug_bind_set_value (stmt, tem);
1558 changed = true;
1562 break;
1564 default:;
1567 stmt = gsi_stmt (*gsi);
1569 /* Fold *& on the lhs. */
1570 if (gimple_has_lhs (stmt))
1572 tree lhs = gimple_get_lhs (stmt);
1573 if (lhs && REFERENCE_CLASS_P (lhs))
1575 tree new_lhs = maybe_fold_reference (lhs, true);
1576 if (new_lhs)
1578 gimple_set_lhs (stmt, new_lhs);
1579 changed = true;
1584 return changed;
1587 /* Fold the statement pointed to by GSI. In some cases, this function may
1588 replace the whole statement with a new one. Returns true iff folding
1589 makes any changes.
1590 The statement pointed to by GSI should be in valid gimple form but may
1591    be in an unfolded state resulting from, for example, constant propagation,
1592    which can produce *&x = 0.  */
1594 bool
1595 fold_stmt (gimple_stmt_iterator *gsi)
1597 return fold_stmt_1 (gsi, false);
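/* Usage sketch (hypothetical caller): after a propagation step that
   may have produced *&x = 0, re-fold every statement of a block:

     gimple_stmt_iterator gsi;
     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
       if (fold_stmt (&gsi))
         update_stmt (gsi_stmt (gsi));

   fold_stmt may replace the statement entirely, so the caller
   re-fetches it from the iterator.  */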
1600 /* Perform the minimal folding on statement STMT. Only operations like
1601 *&x created by constant propagation are handled. The statement cannot
1602 be replaced with a new one. Return true if the statement was
1603 changed, false otherwise.
1604 The statement STMT should be in valid gimple form but may
1605    be in an unfolded state resulting from, for example, constant propagation,
1606    which can produce *&x = 0.  */
1608 bool
1609 fold_stmt_inplace (gimple stmt)
1611 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1612 bool changed = fold_stmt_1 (&gsi, true);
1613 gcc_assert (gsi_stmt (gsi) == stmt);
1614 return changed;
1617 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1618 if EXPR is null or we don't know how.
1619 If non-null, the result always has boolean type. */
1621 static tree
1622 canonicalize_bool (tree expr, bool invert)
1624 if (!expr)
1625 return NULL_TREE;
1626 else if (invert)
1628 if (integer_nonzerop (expr))
1629 return boolean_false_node;
1630 else if (integer_zerop (expr))
1631 return boolean_true_node;
1632 else if (TREE_CODE (expr) == SSA_NAME)
1633 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1634 build_int_cst (TREE_TYPE (expr), 0));
1635 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1636 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1637 boolean_type_node,
1638 TREE_OPERAND (expr, 0),
1639 TREE_OPERAND (expr, 1));
1640 else
1641 return NULL_TREE;
1643 else
1645 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1646 return expr;
1647 if (integer_nonzerop (expr))
1648 return boolean_true_node;
1649 else if (integer_zerop (expr))
1650 return boolean_false_node;
1651 else if (TREE_CODE (expr) == SSA_NAME)
1652 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1653 build_int_cst (TREE_TYPE (expr), 0));
1654 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1655 return fold_build2 (TREE_CODE (expr),
1656 boolean_type_node,
1657 TREE_OPERAND (expr, 0),
1658 TREE_OPERAND (expr, 1));
1659 else
1660 return NULL_TREE;
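/* Example (SSA names hypothetical): for an SSA name x_1 of
   BOOLEAN_TYPE, canonicalize_bool (x_1, false) returns x_1 unchanged,
   while canonicalize_bool (x_1, true) builds x_1 == 0; a comparison
   such as a_2 < b_3 inverts to a_2 >= b_3 via
   invert_tree_comparison.  */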
1664 /* Check to see if a boolean expression EXPR is logically equivalent to the
1665 comparison (OP1 CODE OP2). Check for various identities involving
1666 SSA_NAMEs. */
1668 static bool
1669 same_bool_comparison_p (const_tree expr, enum tree_code code,
1670 const_tree op1, const_tree op2)
1672 gimple s;
1674 /* The obvious case. */
1675 if (TREE_CODE (expr) == code
1676 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1677 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1678 return true;
1680 /* Check for comparing (name, name != 0) and the case where expr
1681 is an SSA_NAME with a definition matching the comparison. */
1682 if (TREE_CODE (expr) == SSA_NAME
1683 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1685 if (operand_equal_p (expr, op1, 0))
1686 return ((code == NE_EXPR && integer_zerop (op2))
1687 || (code == EQ_EXPR && integer_nonzerop (op2)));
1688 s = SSA_NAME_DEF_STMT (expr);
1689 if (is_gimple_assign (s)
1690 && gimple_assign_rhs_code (s) == code
1691 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1692 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1693 return true;
1696 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1697 of name is a comparison, recurse. */
1698 if (TREE_CODE (op1) == SSA_NAME
1699 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1701 s = SSA_NAME_DEF_STMT (op1);
1702 if (is_gimple_assign (s)
1703 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1705 enum tree_code c = gimple_assign_rhs_code (s);
1706 if ((c == NE_EXPR && integer_zerop (op2))
1707 || (c == EQ_EXPR && integer_nonzerop (op2)))
1708 return same_bool_comparison_p (expr, c,
1709 gimple_assign_rhs1 (s),
1710 gimple_assign_rhs2 (s));
1711 if ((c == EQ_EXPR && integer_zerop (op2))
1712 || (c == NE_EXPR && integer_nonzerop (op2)))
1713 return same_bool_comparison_p (expr,
1714 invert_tree_comparison (c, false),
1715 gimple_assign_rhs1 (s),
1716 gimple_assign_rhs2 (s));
1719 return false;
1722 /* Check to see if two boolean expressions OP1 and OP2 are logically
1723 equivalent. */
1725 static bool
1726 same_bool_result_p (const_tree op1, const_tree op2)
1728 /* Simple cases first. */
1729 if (operand_equal_p (op1, op2, 0))
1730 return true;
1732 /* Check the cases where at least one of the operands is a comparison.
1733 These are a bit smarter than operand_equal_p in that they apply some
1734      identities on SSA_NAMEs.  */
1735 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1736 && same_bool_comparison_p (op1, TREE_CODE (op2),
1737 TREE_OPERAND (op2, 0),
1738 TREE_OPERAND (op2, 1)))
1739 return true;
1740 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1741 && same_bool_comparison_p (op2, TREE_CODE (op1),
1742 TREE_OPERAND (op1, 0),
1743 TREE_OPERAND (op1, 1)))
1744 return true;
1746 /* Default case. */
1747 return false;
1750 /* Forward declarations for some mutually recursive functions. */
1752 static tree
1753 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1754 enum tree_code code2, tree op2a, tree op2b);
1755 static tree
1756 and_var_with_comparison (tree var, bool invert,
1757 enum tree_code code2, tree op2a, tree op2b);
1758 static tree
1759 and_var_with_comparison_1 (gimple stmt,
1760 enum tree_code code2, tree op2a, tree op2b);
1761 static tree
1762 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1763 enum tree_code code2, tree op2a, tree op2b);
1764 static tree
1765 or_var_with_comparison (tree var, bool invert,
1766 enum tree_code code2, tree op2a, tree op2b);
1767 static tree
1768 or_var_with_comparison_1 (gimple stmt,
1769 enum tree_code code2, tree op2a, tree op2b);
1771 /* Helper function for and_comparisons_1: try to simplify the AND of the
1772 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1773 If INVERT is true, invert the value of the VAR before doing the AND.
1774    Return NULL_TREE if we can't simplify this to a single expression.  */
1776 static tree
1777 and_var_with_comparison (tree var, bool invert,
1778 enum tree_code code2, tree op2a, tree op2b)
1780 tree t;
1781 gimple stmt = SSA_NAME_DEF_STMT (var);
1783 /* We can only deal with variables whose definitions are assignments. */
1784 if (!is_gimple_assign (stmt))
1785 return NULL_TREE;
1787 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1788 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1789 Then we only have to consider the simpler non-inverted cases. */
1790 if (invert)
1791 t = or_var_with_comparison_1 (stmt,
1792 invert_tree_comparison (code2, false),
1793 op2a, op2b);
1794 else
1795 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1796 return canonicalize_bool (t, invert);
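/* Example of the DeMorgan rewrite above (SSA names hypothetical):
   for !v_1 AND (a_2 < b_3), or_var_with_comparison_1 is called with
   the inverted comparison a_2 >= b_3 to compute v_1 OR (a_2 >= b_3),
   and canonicalize_bool (t, true) then inverts that result back.  */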
1799 /* Try to simplify the AND of the ssa variable defined by the assignment
1800 STMT with the comparison specified by (OP2A CODE2 OP2B).
1801    Return NULL_TREE if we can't simplify this to a single expression.  */
1803 static tree
1804 and_var_with_comparison_1 (gimple stmt,
1805 enum tree_code code2, tree op2a, tree op2b)
1807 tree var = gimple_assign_lhs (stmt);
1808 tree true_test_var = NULL_TREE;
1809 tree false_test_var = NULL_TREE;
1810 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1812 /* Check for identities like (var AND (var == 0)) => false. */
1813 if (TREE_CODE (op2a) == SSA_NAME
1814 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1816 if ((code2 == NE_EXPR && integer_zerop (op2b))
1817 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1819 true_test_var = op2a;
1820 if (var == true_test_var)
1821 return var;
1823 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1824 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1826 false_test_var = op2a;
1827 if (var == false_test_var)
1828 return boolean_false_node;
1832 /* If the definition is a comparison, recurse on it. */
1833 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1835 tree t = and_comparisons_1 (innercode,
1836 gimple_assign_rhs1 (stmt),
1837 gimple_assign_rhs2 (stmt),
1838 code2,
1839 op2a,
1840 op2b);
1841 if (t)
1842 return t;
1845 /* If the definition is an AND or OR expression, we may be able to
1846 simplify by reassociating. */
1847 if (innercode == TRUTH_AND_EXPR
1848 || innercode == TRUTH_OR_EXPR
1849 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1850 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
1852 tree inner1 = gimple_assign_rhs1 (stmt);
1853 tree inner2 = gimple_assign_rhs2 (stmt);
1854 gimple s;
1855 tree t;
1856 tree partial = NULL_TREE;
1857 bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);
1859 /* Check for boolean identities that don't require recursive examination
1860 of inner1/inner2:
1861 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1862 inner1 AND (inner1 OR inner2) => inner1
1863 !inner1 AND (inner1 AND inner2) => false
1864 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1865 Likewise for similar cases involving inner2. */
1866 if (inner1 == true_test_var)
1867 return (is_and ? var : inner1);
1868 else if (inner2 == true_test_var)
1869 return (is_and ? var : inner2);
1870 else if (inner1 == false_test_var)
1871 return (is_and
1872 ? boolean_false_node
1873 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1874 else if (inner2 == false_test_var)
1875 return (is_and
1876 ? boolean_false_node
1877 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1879 /* Next, redistribute/reassociate the AND across the inner tests.
1880 Compute the first partial result, (inner1 AND (op2a code2 op2b)) */
1881 if (TREE_CODE (inner1) == SSA_NAME
1882 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1883 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1884 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1885 gimple_assign_rhs1 (s),
1886 gimple_assign_rhs2 (s),
1887 code2, op2a, op2b)))
1889 /* Handle the AND case, where we are reassociating:
1890 (inner1 AND inner2) AND (op2a code2 op2b)
1891 => (t AND inner2)
1892 If the partial result t is a constant, we win. Otherwise
1893 continue on to try reassociating with the other inner test. */
1894 if (is_and)
1896 if (integer_onep (t))
1897 return inner2;
1898 else if (integer_zerop (t))
1899 return boolean_false_node;
1902 /* Handle the OR case, where we are redistributing:
1903 (inner1 OR inner2) AND (op2a code2 op2b)
1904 => (t OR (inner2 AND (op2a code2 op2b))) */
1905 else if (integer_onep (t))
1906 return boolean_true_node;
1908 /* Save partial result for later. */
1909 partial = t;
1912 /* Compute the second partial result, (inner2 AND (op2a code2 op2b)) */
1913 if (TREE_CODE (inner2) == SSA_NAME
1914 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1915 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1916 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1917 gimple_assign_rhs1 (s),
1918 gimple_assign_rhs2 (s),
1919 code2, op2a, op2b)))
1921 /* Handle the AND case, where we are reassociating:
1922 (inner1 AND inner2) AND (op2a code2 op2b)
1923 => (inner1 AND t) */
1924 if (is_and)
1926 if (integer_onep (t))
1927 return inner1;
1928 else if (integer_zerop (t))
1929 return boolean_false_node;
1930 /* If both are the same, we can apply the identity
1931 (x AND x) == x. */
1932 else if (partial && same_bool_result_p (t, partial))
1933 return t;
1936 /* Handle the OR case, where we are redistributing:
1937 (inner1 OR inner2) AND (op2a code2 op2b)
1938 => (t OR (inner1 AND (op2a code2 op2b)))
1939 => (t OR partial) */
1940 else
1942 if (integer_onep (t))
1943 return boolean_true_node;
1944 else if (partial)
1946 /* We already got a simplification for the other
1947 operand to the redistributed OR expression. The
1948 interesting case is when at least one is false.
1949 Or, if both are the same, we can apply the identity
1950 (x OR x) == x. */
1951 if (integer_zerop (partial))
1952 return t;
1953 else if (integer_zerop (t))
1954 return partial;
1955 else if (same_bool_result_p (t, partial))
1956 return t;
1961 return NULL_TREE;
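/* A worked example of the redistribution above (hypothetical GIMPLE):
     v = (x < 10) | (y > 0);   ... fold v & (x < 5)
   The first partial result is (x < 10) AND (x < 5) => (x < 5), which is
   not constant, so it is stashed in PARTIAL; the second inner test
   (y > 0) shares no operands with (x < 5), so the whole fold gives up
   and returns NULL_TREE.  */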
1964 /* Try to simplify the AND of two comparisons defined by
1965 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1966 If this can be done without constructing an intermediate value,
1967 return the resulting tree; otherwise NULL_TREE is returned.
1968 This function is deliberately asymmetric as it recurses on SSA_DEFs
1969 in the first comparison but not the second. */
1971 static tree
1972 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1973 enum tree_code code2, tree op2a, tree op2b)
1975 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1976 if (operand_equal_p (op1a, op2a, 0)
1977 && operand_equal_p (op1b, op2b, 0))
1979 tree t = combine_comparisons (UNKNOWN_LOCATION,
1980 TRUTH_ANDIF_EXPR, code1, code2,
1981 boolean_type_node, op1a, op1b);
1982 if (t)
1983 return t;
1986 /* Likewise the swapped case of the above. */
1987 if (operand_equal_p (op1a, op2b, 0)
1988 && operand_equal_p (op1b, op2a, 0))
1990 tree t = combine_comparisons (UNKNOWN_LOCATION,
1991 TRUTH_ANDIF_EXPR, code1,
1992 swap_tree_comparison (code2),
1993 boolean_type_node, op1a, op1b);
1994 if (t)
1995 return t;
1998 /* If both comparisons are of the same value against constants, we might
1999 be able to merge them. */
2000 if (operand_equal_p (op1a, op2a, 0)
2001 && TREE_CODE (op1b) == INTEGER_CST
2002 && TREE_CODE (op2b) == INTEGER_CST)
2004 int cmp = tree_int_cst_compare (op1b, op2b);
2006 /* If we have (op1a == op1b), we should either be able to
2007 return that or FALSE, depending on whether the constant op1b
2008 also satisfies the other comparison against op2b. */
2009 if (code1 == EQ_EXPR)
2011 bool done = true;
2012 bool val;
2013 switch (code2)
2015 case EQ_EXPR: val = (cmp == 0); break;
2016 case NE_EXPR: val = (cmp != 0); break;
2017 case LT_EXPR: val = (cmp < 0); break;
2018 case GT_EXPR: val = (cmp > 0); break;
2019 case LE_EXPR: val = (cmp <= 0); break;
2020 case GE_EXPR: val = (cmp >= 0); break;
2021 default: done = false;
2023 if (done)
2025 if (val)
2026 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2027 else
2028 return boolean_false_node;
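/* E.g. (x == 3) AND (x < 5) folds to (x == 3) here, since the constant
   3 satisfies the second test, while (x == 7) AND (x < 5) folds to
   false.  */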
2031 /* Likewise if the second comparison is an == comparison. */
2032 else if (code2 == EQ_EXPR)
2034 bool done = true;
2035 bool val;
2036 switch (code1)
2038 case EQ_EXPR: val = (cmp == 0); break;
2039 case NE_EXPR: val = (cmp != 0); break;
2040 case LT_EXPR: val = (cmp > 0); break;
2041 case GT_EXPR: val = (cmp < 0); break;
2042 case LE_EXPR: val = (cmp >= 0); break;
2043 case GE_EXPR: val = (cmp <= 0); break;
2044 default: done = false;
2046 if (done)
2048 if (val)
2049 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2050 else
2051 return boolean_false_node;
2055 /* Same business with inequality tests. */
2056 else if (code1 == NE_EXPR)
2058 bool val;
2059 switch (code2)
2061 case EQ_EXPR: val = (cmp != 0); break;
2062 case NE_EXPR: val = (cmp == 0); break;
2063 case LT_EXPR: val = (cmp >= 0); break;
2064 case GT_EXPR: val = (cmp <= 0); break;
2065 case LE_EXPR: val = (cmp > 0); break;
2066 case GE_EXPR: val = (cmp < 0); break;
2067 default:
2068 val = false;
2070 if (val)
2071 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2073 else if (code2 == NE_EXPR)
2075 bool val;
2076 switch (code1)
2078 case EQ_EXPR: val = (cmp == 0); break;
2079 case NE_EXPR: val = (cmp != 0); break;
2080 case LT_EXPR: val = (cmp <= 0); break;
2081 case GT_EXPR: val = (cmp >= 0); break;
2082 case LE_EXPR: val = (cmp < 0); break;
2083 case GE_EXPR: val = (cmp > 0); break;
2084 default:
2085 val = false;
2087 if (val)
2088 return fold_build2 (code1, boolean_type_node, op1a, op1b);
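/* E.g. (x != 3) AND (x < 3) folds to (x < 3), since every value
   satisfying the range test already satisfies the inequality.  */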
2091 /* Choose the more restrictive of two < or <= comparisons. */
2092 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2093 && (code2 == LT_EXPR || code2 == LE_EXPR))
2095 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2096 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2097 else
2098 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2101 /* Likewise choose the more restrictive of two > or >= comparisons. */
2102 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2103 && (code2 == GT_EXPR || code2 == GE_EXPR))
2105 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2106 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2107 else
2108 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2111 /* Check for singleton ranges. */
2112 else if (cmp == 0
2113 && ((code1 == LE_EXPR && code2 == GE_EXPR)
2114 || (code1 == GE_EXPR && code2 == LE_EXPR)))
2115 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
2117 /* Check for disjoint ranges. */
2118 else if (cmp <= 0
2119 && (code1 == LT_EXPR || code1 == LE_EXPR)
2120 && (code2 == GT_EXPR || code2 == GE_EXPR))
2121 return boolean_false_node;
2122 else if (cmp >= 0
2123 && (code1 == GT_EXPR || code1 == GE_EXPR)
2124 && (code2 == LT_EXPR || code2 == LE_EXPR))
2125 return boolean_false_node;
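/* Examples for the range cases above:
     (x < 3) AND (x < 5)   => (x < 3)     more restrictive bound
     (x <= 3) AND (x >= 3) => (x == 3)    singleton range
     (x < 3) AND (x > 5)   => false       disjoint ranges  */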
2128 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2129 NAME's definition is a truth value. See if there are any simplifications
2130 that can be done against the NAME's definition. */
2131 if (TREE_CODE (op1a) == SSA_NAME
2132 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2133 && (integer_zerop (op1b) || integer_onep (op1b)))
2135 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2136 || (code1 == NE_EXPR && integer_onep (op1b)));
2137 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2138 switch (gimple_code (stmt))
2140 case GIMPLE_ASSIGN:
2141 /* Try to simplify by copy-propagating the definition. */
2142 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
2144 case GIMPLE_PHI:
2145 /* If every argument to the PHI produces the same result when
2146 ANDed with the second comparison, we win.
2147 Do not do this unless the type is bool since we need a bool
2148 result here anyway. */
2149 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2151 tree result = NULL_TREE;
2152 unsigned i;
2153 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2155 tree arg = gimple_phi_arg_def (stmt, i);
2157 /* If this PHI has itself as an argument, ignore it.
2158 If all the other args produce the same result,
2159 we're still OK. */
2160 if (arg == gimple_phi_result (stmt))
2161 continue;
2162 else if (TREE_CODE (arg) == INTEGER_CST)
2164 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
2166 if (!result)
2167 result = boolean_false_node;
2168 else if (!integer_zerop (result))
2169 return NULL_TREE;
2171 else if (!result)
2172 result = fold_build2 (code2, boolean_type_node,
2173 op2a, op2b);
2174 else if (!same_bool_comparison_p (result,
2175 code2, op2a, op2b))
2176 return NULL_TREE;
2178 else if (TREE_CODE (arg) == SSA_NAME)
2180 tree temp = and_var_with_comparison (arg, invert,
2181 code2, op2a, op2b);
2182 if (!temp)
2183 return NULL_TREE;
2184 else if (!result)
2185 result = temp;
2186 else if (!same_bool_result_p (result, temp))
2187 return NULL_TREE;
2189 else
2190 return NULL_TREE;
2192 return result;
2195 default:
2196 break;
2199 return NULL_TREE;
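/* PHI example for the case above (hypothetical GIMPLE):
     v_1 = PHI <0(bb3), w_2(bb4)>
   For (v_1 != 0) AND (x < y), the constant argument 0 contributes
   boolean_false_node; the SSA argument w_2 is recursed on, and the
   fold succeeds only if it yields that same result.  */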
2202 /* Try to simplify the AND of two comparisons, specified by
2203 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2204 If this can be simplified to a single expression (without requiring
2205 introducing more SSA variables to hold intermediate values),
2206 return the resulting tree. Otherwise return NULL_TREE.
2207 If the result expression is non-null, it has boolean type. */
2209 tree
2210 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2211 enum tree_code code2, tree op2a, tree op2b)
2213 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2214 if (t)
2215 return t;
2216 else
2217 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
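/* Since and_comparisons_1 only recurses on its first argument, the
   retry above gives callers symmetry; e.g. for an integer x and
   constant c,
     maybe_fold_and_comparisons (LT_EXPR, x, c, GE_EXPR, x, c)
   folds to boolean_false_node.  */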
2220 /* Helper function for or_comparisons_1: try to simplify the OR of the
2221 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2222 If INVERT is true, invert the value of VAR before doing the OR.
2223 Return NULL_TREE if we can't simplify this to a single expression. */
2225 static tree
2226 or_var_with_comparison (tree var, bool invert,
2227 enum tree_code code2, tree op2a, tree op2b)
2229 tree t;
2230 gimple stmt = SSA_NAME_DEF_STMT (var);
2232 /* We can only deal with variables whose definitions are assignments. */
2233 if (!is_gimple_assign (stmt))
2234 return NULL_TREE;
2236 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2237 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2238 Then we only have to consider the simpler non-inverted cases. */
2239 if (invert)
2240 t = and_var_with_comparison_1 (stmt,
2241 invert_tree_comparison (code2, false),
2242 op2a, op2b);
2243 else
2244 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2245 return canonicalize_bool (t, invert);
2248 /* Try to simplify the OR of the ssa variable defined by the assignment
2249 STMT with the comparison specified by (OP2A CODE2 OP2B).
2250 Return NULL_TREE if we can't simplify this to a single expression. */
2252 static tree
2253 or_var_with_comparison_1 (gimple stmt,
2254 enum tree_code code2, tree op2a, tree op2b)
2256 tree var = gimple_assign_lhs (stmt);
2257 tree true_test_var = NULL_TREE;
2258 tree false_test_var = NULL_TREE;
2259 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2261 /* Check for identities like (var OR (var != 0)) => true. */
2262 if (TREE_CODE (op2a) == SSA_NAME
2263 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2265 if ((code2 == NE_EXPR && integer_zerop (op2b))
2266 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2268 true_test_var = op2a;
2269 if (var == true_test_var)
2270 return var;
2272 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2273 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2275 false_test_var = op2a;
2276 if (var == false_test_var)
2277 return boolean_true_node;
2281 /* If the definition is a comparison, recurse on it. */
2282 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2284 tree t = or_comparisons_1 (innercode,
2285 gimple_assign_rhs1 (stmt),
2286 gimple_assign_rhs2 (stmt),
2287 code2,
2288 op2a,
2289 op2b);
2290 if (t)
2291 return t;
2294 /* If the definition is an AND or OR expression, we may be able to
2295 simplify by reassociating. */
2296 if (innercode == TRUTH_AND_EXPR
2297 || innercode == TRUTH_OR_EXPR
2298 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2299 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
2301 tree inner1 = gimple_assign_rhs1 (stmt);
2302 tree inner2 = gimple_assign_rhs2 (stmt);
2303 gimple s;
2304 tree t;
2305 tree partial = NULL_TREE;
2306 bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
2308 /* Check for boolean identities that don't require recursive examination
2309 of inner1/inner2:
2310 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2311 inner1 OR (inner1 AND inner2) => inner1
2312 !inner1 OR (inner1 OR inner2) => true
2313 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2314 Likewise for similar cases involving inner2. */
2315 if (inner1 == true_test_var)
2316 return (is_or ? var : inner1);
2317 else if (inner2 == true_test_var)
2318 return (is_or ? var : inner2);
2319 else if (inner1 == false_test_var)
2320 return (is_or
2321 ? boolean_true_node
2322 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2323 else if (inner2 == false_test_var)
2324 return (is_or
2325 ? boolean_true_node
2326 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2328 /* Next, redistribute/reassociate the OR across the inner tests.
2329 Compute the first partial result, (inner1 OR (op2a code2 op2b)) */
2330 if (TREE_CODE (inner1) == SSA_NAME
2331 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2332 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2333 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2334 gimple_assign_rhs1 (s),
2335 gimple_assign_rhs2 (s),
2336 code2, op2a, op2b)))
2338 /* Handle the OR case, where we are reassociating:
2339 (inner1 OR inner2) OR (op2a code2 op2b)
2340 => (t OR inner2)
2341 If the partial result t is a constant, we win. Otherwise
2342 continue on to try reassociating with the other inner test. */
2343 if (is_or)
2345 if (integer_onep (t))
2346 return boolean_true_node;
2347 else if (integer_zerop (t))
2348 return inner2;
2351 /* Handle the AND case, where we are redistributing:
2352 (inner1 AND inner2) OR (op2a code2 op2b)
2353 => (t AND (inner2 OR (op2a code2 op2b))) */
2354 else if (integer_zerop (t))
2355 return boolean_false_node;
2357 /* Save partial result for later. */
2358 partial = t;
2361 /* Compute the second partial result, (inner2 OR (op2a code2 op2b)) */
2362 if (TREE_CODE (inner2) == SSA_NAME
2363 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2364 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2365 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2366 gimple_assign_rhs1 (s),
2367 gimple_assign_rhs2 (s),
2368 code2, op2a, op2b)))
2370 /* Handle the OR case, where we are reassociating:
2371 (inner1 OR inner2) OR (op2a code2 op2b)
2372 => (inner1 OR t)
2373 => (t OR partial) */
2374 if (is_or)
2376 if (integer_zerop (t))
2377 return inner1;
2378 else if (integer_onep (t))
2379 return boolean_true_node;
2380 /* If both are the same, we can apply the identity
2381 (x OR x) == x. */
2382 else if (partial && same_bool_result_p (t, partial))
2383 return t;
2386 /* Handle the AND case, where we are redistributing:
2387 (inner1 AND inner2) OR (op2a code2 op2b)
2388 => (t AND (inner1 OR (op2a code2 op2b)))
2389 => (t AND partial) */
2390 else
2392 if (integer_zerop (t))
2393 return boolean_false_node;
2394 else if (partial)
2396 /* We already got a simplification for the other
2397 operand to the redistributed AND expression. The
2398 interesting case is when at least one is true.
2399 Or, if both are the same, we can apply the identity
2400 (x AND x) == x. */
2401 if (integer_onep (partial))
2402 return t;
2403 else if (integer_onep (t))
2404 return partial;
2405 else if (same_bool_result_p (t, partial))
2406 return t;
2411 return NULL_TREE;
2414 /* Try to simplify the OR of two comparisons defined by
2415 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2416 If this can be done without constructing an intermediate value,
2417 return the resulting tree; otherwise NULL_TREE is returned.
2418 This function is deliberately asymmetric as it recurses on SSA_DEFs
2419 in the first comparison but not the second. */
2421 static tree
2422 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2423 enum tree_code code2, tree op2a, tree op2b)
2425 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2426 if (operand_equal_p (op1a, op2a, 0)
2427 && operand_equal_p (op1b, op2b, 0))
2429 tree t = combine_comparisons (UNKNOWN_LOCATION,
2430 TRUTH_ORIF_EXPR, code1, code2,
2431 boolean_type_node, op1a, op1b);
2432 if (t)
2433 return t;
2436 /* Likewise the swapped case of the above. */
2437 if (operand_equal_p (op1a, op2b, 0)
2438 && operand_equal_p (op1b, op2a, 0))
2440 tree t = combine_comparisons (UNKNOWN_LOCATION,
2441 TRUTH_ORIF_EXPR, code1,
2442 swap_tree_comparison (code2),
2443 boolean_type_node, op1a, op1b);
2444 if (t)
2445 return t;
2448 /* If both comparisons are of the same value against constants, we might
2449 be able to merge them. */
2450 if (operand_equal_p (op1a, op2a, 0)
2451 && TREE_CODE (op1b) == INTEGER_CST
2452 && TREE_CODE (op2b) == INTEGER_CST)
2454 int cmp = tree_int_cst_compare (op1b, op2b);
2456 /* If we have (op1a != op1b), we should either be able to
2457 return that or TRUE, depending on whether the constant op1b
2458 also satisfies the other comparison against op2b. */
2459 if (code1 == NE_EXPR)
2461 bool done = true;
2462 bool val;
2463 switch (code2)
2465 case EQ_EXPR: val = (cmp == 0); break;
2466 case NE_EXPR: val = (cmp != 0); break;
2467 case LT_EXPR: val = (cmp < 0); break;
2468 case GT_EXPR: val = (cmp > 0); break;
2469 case LE_EXPR: val = (cmp <= 0); break;
2470 case GE_EXPR: val = (cmp >= 0); break;
2471 default: done = false;
2473 if (done)
2475 if (val)
2476 return boolean_true_node;
2477 else
2478 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2481 /* Likewise if the second comparison is a != comparison. */
2482 else if (code2 == NE_EXPR)
2484 bool done = true;
2485 bool val;
2486 switch (code1)
2488 case EQ_EXPR: val = (cmp == 0); break;
2489 case NE_EXPR: val = (cmp != 0); break;
2490 case LT_EXPR: val = (cmp > 0); break;
2491 case GT_EXPR: val = (cmp < 0); break;
2492 case LE_EXPR: val = (cmp >= 0); break;
2493 case GE_EXPR: val = (cmp <= 0); break;
2494 default: done = false;
2496 if (done)
2498 if (val)
2499 return boolean_true_node;
2500 else
2501 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2505 /* See if an equality test is redundant with the other comparison. */
2506 else if (code1 == EQ_EXPR)
2508 bool val;
2509 switch (code2)
2511 case EQ_EXPR: val = (cmp == 0); break;
2512 case NE_EXPR: val = (cmp != 0); break;
2513 case LT_EXPR: val = (cmp < 0); break;
2514 case GT_EXPR: val = (cmp > 0); break;
2515 case LE_EXPR: val = (cmp <= 0); break;
2516 case GE_EXPR: val = (cmp >= 0); break;
2517 default:
2518 val = false;
2520 if (val)
2521 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2523 else if (code2 == EQ_EXPR)
2525 bool val;
2526 switch (code1)
2528 case EQ_EXPR: val = (cmp == 0); break;
2529 case NE_EXPR: val = (cmp != 0); break;
2530 case LT_EXPR: val = (cmp > 0); break;
2531 case GT_EXPR: val = (cmp < 0); break;
2532 case LE_EXPR: val = (cmp >= 0); break;
2533 case GE_EXPR: val = (cmp <= 0); break;
2534 default:
2535 val = false;
2537 if (val)
2538 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2541 /* Choose the less restrictive of two < or <= comparisons. */
2542 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2543 && (code2 == LT_EXPR || code2 == LE_EXPR))
2545 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2546 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2547 else
2548 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2551 /* Likewise choose the less restrictive of two > or >= comparisons. */
2552 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2553 && (code2 == GT_EXPR || code2 == GE_EXPR))
2555 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2556 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2557 else
2558 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2561 /* Check for singleton ranges. */
2562 else if (cmp == 0
2563 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2564 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2565 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2567 /* Check for less/greater pairs that don't restrict the range at all. */
2568 else if (cmp >= 0
2569 && (code1 == LT_EXPR || code1 == LE_EXPR)
2570 && (code2 == GT_EXPR || code2 == GE_EXPR))
2571 return boolean_true_node;
2572 else if (cmp <= 0
2573 && (code1 == GT_EXPR || code1 == GE_EXPR)
2574 && (code2 == LT_EXPR || code2 == LE_EXPR))
2575 return boolean_true_node;
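/* Examples for the OR range cases above:
     (x < 3) OR (x < 5)  => (x < 5)    less restrictive bound
     (x < 3) OR (x > 3)  => (x != 3)   singleton gap
     (x < 5) OR (x > 3)  => true       covers the whole range  */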
2578 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2579 NAME's definition is a truth value. See if there are any simplifications
2580 that can be done against the NAME's definition. */
2581 if (TREE_CODE (op1a) == SSA_NAME
2582 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2583 && (integer_zerop (op1b) || integer_onep (op1b)))
2585 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2586 || (code1 == NE_EXPR && integer_onep (op1b)));
2587 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2588 switch (gimple_code (stmt))
2590 case GIMPLE_ASSIGN:
2591 /* Try to simplify by copy-propagating the definition. */
2592 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2594 case GIMPLE_PHI:
2595 /* If every argument to the PHI produces the same result when
2596 ORed with the second comparison, we win.
2597 Do not do this unless the type is bool since we need a bool
2598 result here anyway. */
2599 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2601 tree result = NULL_TREE;
2602 unsigned i;
2603 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2605 tree arg = gimple_phi_arg_def (stmt, i);
2607 /* If this PHI has itself as an argument, ignore it.
2608 If all the other args produce the same result,
2609 we're still OK. */
2610 if (arg == gimple_phi_result (stmt))
2611 continue;
2612 else if (TREE_CODE (arg) == INTEGER_CST)
2614 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2616 if (!result)
2617 result = boolean_true_node;
2618 else if (!integer_onep (result))
2619 return NULL_TREE;
2621 else if (!result)
2622 result = fold_build2 (code2, boolean_type_node,
2623 op2a, op2b);
2624 else if (!same_bool_comparison_p (result,
2625 code2, op2a, op2b))
2626 return NULL_TREE;
2628 else if (TREE_CODE (arg) == SSA_NAME)
2630 tree temp = or_var_with_comparison (arg, invert,
2631 code2, op2a, op2b);
2632 if (!temp)
2633 return NULL_TREE;
2634 else if (!result)
2635 result = temp;
2636 else if (!same_bool_result_p (result, temp))
2637 return NULL_TREE;
2639 else
2640 return NULL_TREE;
2642 return result;
2645 default:
2646 break;
2649 return NULL_TREE;
2652 /* Try to simplify the OR of two comparisons, specified by
2653 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2654 If this can be simplified to a single expression (without requiring
2655 introducing more SSA variables to hold intermediate values),
2656 return the resulting tree. Otherwise return NULL_TREE.
2657 If the result expression is non-null, it has boolean type. */
2659 tree
2660 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2661 enum tree_code code2, tree op2a, tree op2b)
2663 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2664 if (t)
2665 return t;
2666 else
2667 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2671 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2673 Either NULL_TREE, a simplified but non-constant expression, or a
2674 constant is returned.
2676 ??? This should go into a gimple-fold-inline.h file to be eventually
2677 privatized with the single valueize function used in the various TUs
2678 to avoid the indirect function call overhead. */
2680 tree
2681 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2683 location_t loc = gimple_location (stmt);
2684 switch (gimple_code (stmt))
2686 case GIMPLE_ASSIGN:
2688 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2690 switch (get_gimple_rhs_class (subcode))
2692 case GIMPLE_SINGLE_RHS:
2694 tree rhs = gimple_assign_rhs1 (stmt);
2695 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2697 if (TREE_CODE (rhs) == SSA_NAME)
2699 /* If the RHS is an SSA_NAME, return its known constant value,
2700 if any. */
2701 return (*valueize) (rhs);
2703 /* Handle propagating invariant addresses into address
2704 operations. */
2705 else if (TREE_CODE (rhs) == ADDR_EXPR
2706 && !is_gimple_min_invariant (rhs))
2708 HOST_WIDE_INT offset;
2709 tree base;
2710 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2711 &offset,
2712 valueize);
2713 if (base
2714 && (CONSTANT_CLASS_P (base)
2715 || decl_address_invariant_p (base)))
2716 return build_invariant_address (TREE_TYPE (rhs),
2717 base, offset);
2719 else if (TREE_CODE (rhs) == CONSTRUCTOR
2720 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2721 && (CONSTRUCTOR_NELTS (rhs)
2722 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2724 unsigned i;
2725 tree val, list;
2727 list = NULL_TREE;
2728 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2730 val = (*valueize) (val);
2731 if (TREE_CODE (val) == INTEGER_CST
2732 || TREE_CODE (val) == REAL_CST
2733 || TREE_CODE (val) == FIXED_CST)
2734 list = tree_cons (NULL_TREE, val, list);
2735 else
2736 return NULL_TREE;
2739 return build_vector (TREE_TYPE (rhs), nreverse (list));
2742 if (kind == tcc_reference)
2744 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2745 || TREE_CODE (rhs) == REALPART_EXPR
2746 || TREE_CODE (rhs) == IMAGPART_EXPR)
2747 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2749 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2750 return fold_unary_loc (EXPR_LOCATION (rhs),
2751 TREE_CODE (rhs),
2752 TREE_TYPE (rhs), val);
2754 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2755 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2757 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2758 return fold_ternary_loc (EXPR_LOCATION (rhs),
2759 TREE_CODE (rhs),
2760 TREE_TYPE (rhs), val,
2761 TREE_OPERAND (rhs, 1),
2762 TREE_OPERAND (rhs, 2));
2764 else if (TREE_CODE (rhs) == MEM_REF
2765 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2767 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2768 if (TREE_CODE (val) == ADDR_EXPR
2769 && is_gimple_min_invariant (val))
2771 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2772 unshare_expr (val),
2773 TREE_OPERAND (rhs, 1));
2774 if (tem)
2775 rhs = tem;
2778 return fold_const_aggregate_ref_1 (rhs, valueize);
2780 else if (kind == tcc_declaration)
2781 return get_symbol_constant_value (rhs);
2782 return rhs;
2785 case GIMPLE_UNARY_RHS:
2787 /* Handle unary operators that can appear in GIMPLE form.
2788 Note that we know the single operand must be a constant,
2789 so this should almost always return a simplified RHS. */
2790 tree lhs = gimple_assign_lhs (stmt);
2791 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2793 /* Conversions are useless for CCP purposes if they are
2794 value-preserving. Thus the restrictions that
2795 useless_type_conversion_p places for pointer type conversions
2796 do not apply here. Substitution later will only substitute to
2797 allowed places. */
2798 if (CONVERT_EXPR_CODE_P (subcode)
2799 && POINTER_TYPE_P (TREE_TYPE (lhs))
2800 && POINTER_TYPE_P (TREE_TYPE (op0)))
2802 tree tem;
2803 /* Try to re-construct array references on-the-fly. */
2804 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2805 TREE_TYPE (op0))
2806 && ((tem = maybe_fold_offset_to_address
2807 (loc,
2808 op0, integer_zero_node, TREE_TYPE (lhs)))
2809 != NULL_TREE))
2810 return tem;
2811 return op0;
2814 return
2815 fold_unary_ignore_overflow_loc (loc, subcode,
2816 gimple_expr_type (stmt), op0);
2819 case GIMPLE_BINARY_RHS:
2821 /* Handle binary operators that can appear in GIMPLE form. */
2822 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2823 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2825 /* Translate &x + CST into an invariant form suitable for
2826 further propagation. */
2827 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2828 && TREE_CODE (op0) == ADDR_EXPR
2829 && TREE_CODE (op1) == INTEGER_CST)
2831 tree off = fold_convert (ptr_type_node, op1);
2832 return build_fold_addr_expr
2833 (fold_build2 (MEM_REF,
2834 TREE_TYPE (TREE_TYPE (op0)),
2835 unshare_expr (op0), off));
2838 return fold_binary_loc (loc, subcode,
2839 gimple_expr_type (stmt), op0, op1);
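/* E.g. if op0 valueizes to &a and op1 to 4, the POINTER_PLUS_EXPR
   case above rebuilds the sum as an ADDR_EXPR of a MEM_REF based at
   &a with byte offset 4, which is invariant and suitable for further
   propagation.  */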
2842 case GIMPLE_TERNARY_RHS:
2844 /* Handle ternary operators that can appear in GIMPLE form. */
2845 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2846 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2847 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2849 return fold_ternary_loc (loc, subcode,
2850 gimple_expr_type (stmt), op0, op1, op2);
2853 default:
2854 gcc_unreachable ();
2858 case GIMPLE_CALL:
2860 tree fn = (*valueize) (gimple_call_fn (stmt));
2861 if (TREE_CODE (fn) == ADDR_EXPR
2862 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2863 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2865 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2866 tree call, retval;
2867 unsigned i;
2868 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2869 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2870 call = build_call_array_loc (loc,
2871 gimple_call_return_type (stmt),
2872 fn, gimple_call_num_args (stmt), args);
2873 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2874 if (retval)
2875 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2876 STRIP_NOPS (retval);
2877 return retval;
2879 return NULL_TREE;
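/* E.g. a call to __builtin_strlen whose argument valueizes to the
   address of the string constant "abc" is rebuilt as a CALL_EXPR and
   folded by fold_call_expr to the INTEGER_CST 3.  */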
2882 default:
2883 return NULL_TREE;
2887 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2888 Returns NULL_TREE if folding to a constant is not possible, otherwise
2889 returns a constant according to is_gimple_min_invariant. */
2891 tree
2892 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2894 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2895 if (res && is_gimple_min_invariant (res))
2896 return res;
2897 return NULL_TREE;
2901 /* The following set of functions is supposed to fold references using
2902 their constant initializers. */
2904 static tree fold_ctor_reference (tree type, tree ctor,
2905 unsigned HOST_WIDE_INT offset,
2906 unsigned HOST_WIDE_INT size);
2908 /* See if we can find a constructor defining the value of BASE.
2909 When we know the constructor at a constant offset (such as when
2910 BASE is array[40] and we know the constructor of array), then
2911 BIT_OFFSET is adjusted accordingly.
2913 As a special case, return error_mark_node when constructor
2914 is not explicitly available, but it is known to be zero
2915 such as 'static const int a;'. */
2916 static tree
2917 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2918 tree (*valueize)(tree))
2920 HOST_WIDE_INT bit_offset2, size, max_size;
2921 if (TREE_CODE (base) == MEM_REF)
2923 if (!integer_zerop (TREE_OPERAND (base, 1)))
2925 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2926 return NULL_TREE;
2927 *bit_offset += (mem_ref_offset (base).low
2928 * BITS_PER_UNIT);
2931 if (valueize
2932 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2933 base = valueize (TREE_OPERAND (base, 0));
2934 if (!base || TREE_CODE (base) != ADDR_EXPR)
2935 return NULL_TREE;
2936 base = TREE_OPERAND (base, 0);
2939 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2940 DECL_INITIAL. If BASE is a nested reference into another
2941 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2942 the inner reference. */
2943 switch (TREE_CODE (base))
2945 case VAR_DECL:
2946 if (!const_value_known_p (base))
2947 return NULL_TREE;
2949 /* Fallthru. */
2950 case CONST_DECL:
2951 if (!DECL_INITIAL (base)
2952 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2953 return error_mark_node;
2954 return DECL_INITIAL (base);
2956 case ARRAY_REF:
2957 case COMPONENT_REF:
2958 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2959 if (max_size == -1 || size != max_size)
2960 return NULL_TREE;
2961 *bit_offset += bit_offset2;
2962 return get_base_constructor (base, bit_offset, valueize);
2964 case STRING_CST:
2965 case CONSTRUCTOR:
2966 return base;
2968 default:
2969 return NULL_TREE;
2973 /* CTOR is a STRING_CST. Fold a reference of type TYPE and size SIZE
2974 to the memory at bit OFFSET.
2976 We only do the simple job of folding byte accesses. */
2978 static tree
2979 fold_string_cst_ctor_reference (tree type, tree ctor,
2980 unsigned HOST_WIDE_INT offset,
2981 unsigned HOST_WIDE_INT size)
2983 if (INTEGRAL_TYPE_P (type)
2984 && (TYPE_MODE (type)
2985 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2986 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2987 == MODE_INT)
2988 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2989 && size == BITS_PER_UNIT
2990 && !(offset % BITS_PER_UNIT))
2992 offset /= BITS_PER_UNIT;
2993 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2994 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2995 [offset]));
2996 /* Folding
2997 const char a[20] = "hello";
2998 return a[10];
3000 might lead to an offset greater than the string length. In this
3001 case we know the value is either initialized to 0 or out of bounds.
3002 Return 0 in both cases. */
3003 return build_zero_cst (type);
3005 return NULL_TREE;
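/* E.g. with CTOR "hello", TYPE char, OFFSET 8 and SIZE 8 this returns
   the char constant 'e'; the same read at OFFSET 80 (past the string,
   but within the object) returns 0.  */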
3008 /* CTOR is a CONSTRUCTOR of an array type. Fold a reference of type TYPE and size
3009 SIZE to the memory at bit OFFSET. */
3011 static tree
3012 fold_array_ctor_reference (tree type, tree ctor,
3013 unsigned HOST_WIDE_INT offset,
3014 unsigned HOST_WIDE_INT size)
3016 unsigned HOST_WIDE_INT cnt;
3017 tree cfield, cval;
3018 double_int low_bound, elt_size;
3019 double_int index, max_index;
3020 double_int access_index;
3021 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
3022 HOST_WIDE_INT inner_offset;
3024 /* Compute low bound and elt size. */
3025 if (domain_type && TYPE_MIN_VALUE (domain_type))
3027 /* Static constructors for variably sized objects make no sense. */
3028 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
3029 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
3031 else
3032 low_bound = double_int_zero;
3033 /* Static constructors for variably sized objects make no sense. */
3034 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
3035 == INTEGER_CST);
3036 elt_size =
3037 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
3040 /* We can only handle constant-sized accesses that are known not to
3041 be larger than the size of the array element. */
3042 if (!TYPE_SIZE_UNIT (type)
3043 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
3044 || double_int_cmp (elt_size,
3045 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
3046 return NULL_TREE;
3048 /* Compute the array index we look for. */
3049 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
3050 elt_size, TRUNC_DIV_EXPR);
3051 access_index = double_int_add (access_index, low_bound);
3053 /* And offset within the access. */
3054 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
3056 /* See if the array field is large enough to span the whole access. We do not
3057 care to fold accesses spanning multiple array indexes. */
3058 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
3059 return NULL_TREE;
3061 index = double_int_sub (low_bound, double_int_one);
3062 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
3064 /* An array constructor may explicitly set the index, specify a range,
3065 or leave the index NULL, meaning that it is the next index after
3066 the previous one. */
3067 if (cfield)
3069 if (TREE_CODE (cfield) == INTEGER_CST)
3070 max_index = index = tree_to_double_int (cfield);
3071 else
3073 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
3074 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
3075 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
3078 else
3079 max_index = index = double_int_add (index, double_int_one);
3081 /* Do we have a match? */
3082 if (double_int_cmp (access_index, index, 1) >= 0
3083 && double_int_cmp (access_index, max_index, 1) <= 0)
3084 return fold_ctor_reference (type, cval, inner_offset, size);
3086 /* When memory is not explicitly mentioned in the constructor,
3087 it is 0 (or out of range). */
3088 return build_zero_cst (type);
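/* Index computation example: reading a 32-bit int at bit OFFSET 64
   from int a[4] = {1, 2, 3, 4} gives elt_size 4, access_index
   64/8 / 4 = 2 and inner_offset 0, so the element cval 3 is folded.  */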
3091 /* CTOR is a CONSTRUCTOR of an aggregate or vector.
3092 Fold a reference of type TYPE and size SIZE to the memory at bit OFFSET. */
3094 static tree
3095 fold_nonarray_ctor_reference (tree type, tree ctor,
3096 unsigned HOST_WIDE_INT offset,
3097 unsigned HOST_WIDE_INT size)
3099 unsigned HOST_WIDE_INT cnt;
3100 tree cfield, cval;
3102 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
3103 cval)
3105 tree byte_offset = DECL_FIELD_OFFSET (cfield);
3106 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
3107 tree field_size = DECL_SIZE (cfield);
3108 double_int bitoffset;
3109 double_int byte_offset_cst = tree_to_double_int (byte_offset);
3110 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
3111 double_int bitoffset_end;
3113 /* Variable sized objects in static constructors make no sense,
3114 but field_size can be NULL for flexible array members. */
3115 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
3116 && TREE_CODE (byte_offset) == INTEGER_CST
3117 && (field_size != NULL_TREE
3118 ? TREE_CODE (field_size) == INTEGER_CST
3119 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
3121 /* Compute bit offset of the field. */
3122 bitoffset = double_int_add (tree_to_double_int (field_offset),
3123 double_int_mul (byte_offset_cst,
3124 bits_per_unit_cst));
3125 /* Compute bit offset where the field ends. */
3126 if (field_size != NULL_TREE)
3127 bitoffset_end = double_int_add (bitoffset,
3128 tree_to_double_int (field_size));
3129 else
3130 bitoffset_end = double_int_zero;
3132 /* Is OFFSET in the range (BITOFFSET, BITOFFSET_END)? */
3133 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) >= 0
3134 && (field_size == NULL_TREE
3135 || double_int_cmp (uhwi_to_double_int (offset),
3136 bitoffset_end, 0) < 0))
3138 double_int access_end = double_int_add (uhwi_to_double_int (offset),
3139 uhwi_to_double_int (size));
3140 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
3141 bitoffset);
3142 /* We do have overlap. Now see if the field is large enough to
3143 cover the access. Give up on accesses spanning multiple
3144 fields. */
3145 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
3146 return NULL_TREE;
3147 return fold_ctor_reference (type, cval,
3148 double_int_to_uhwi (inner_offset), size);
3151 /* When memory is not explicitly mentioned in the constructor, it is 0. */
3152 return build_zero_cst (type);
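/* E.g. for struct { int i; int j; } c = { 1, 2 } on a 32-bit-int
   target, a 32-bit read at OFFSET 32 lands in j's bit range [32, 64),
   so we recurse into fold_ctor_reference on the cval 2.  */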
3155 /* CTOR is a value initializing memory. Fold a reference of type TYPE and size SIZE
3156 to the memory at bit OFFSET. */
3158 static tree
3159 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
3160 unsigned HOST_WIDE_INT size)
3162 tree ret;
3164 /* We found the field with an exact match. */
3165 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
3166 && !offset)
3167 return canonicalize_constructor_val (ctor);
3169 /* We are at the end of the walk; see if we can view-convert the
3170 result. */
3171 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
3172 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3173 && operand_equal_p (TYPE_SIZE (type),
3174 TYPE_SIZE (TREE_TYPE (ctor)), 0))
3176 ret = canonicalize_constructor_val (ctor);
3177 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
3178 if (ret)
3179 STRIP_NOPS (ret);
3180 return ret;
3182 if (TREE_CODE (ctor) == STRING_CST)
3183 return fold_string_cst_ctor_reference (type, ctor, offset, size);
3184 if (TREE_CODE (ctor) == CONSTRUCTOR)
3187 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
3188 return fold_array_ctor_reference (type, ctor, offset, size);
3189 else
3190 return fold_nonarray_ctor_reference (type, ctor, offset, size);
3193 return NULL_TREE;
3196 /* Return the tree representing the element referenced by T if T is an
3197 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
3198 names using VALUEIZE. Return NULL_TREE otherwise. */
3200 tree
3201 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
3203 tree ctor, idx, base;
3204 HOST_WIDE_INT offset, size, max_size;
3205 tree tem;
3207 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3208 return get_symbol_constant_value (t);
3210 tem = fold_read_from_constant_string (t);
3211 if (tem)
3212 return tem;
3214 switch (TREE_CODE (t))
3216 case ARRAY_REF:
3217 case ARRAY_RANGE_REF:
3218 /* Constant indexes are handled well by get_base_constructor.
3219 Only special case variable offsets.
3220 FIXME: This code can't handle nested references with variable indexes
3221 (they will be handled only by iteration of ccp). Perhaps we can bring
3222 get_ref_base_and_extent here and make it use a valueize callback. */
3223 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3224 && valueize
3225 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3226 && host_integerp (idx, 0))
3228 tree low_bound, unit_size;
3230 /* If the resulting bit-offset is constant, track it. */
3231 if ((low_bound = array_ref_low_bound (t),
3232 host_integerp (low_bound, 0))
3233 && (unit_size = array_ref_element_size (t),
3234 host_integerp (unit_size, 1)))
3236 offset = TREE_INT_CST_LOW (idx);
3237 offset -= TREE_INT_CST_LOW (low_bound);
3238 offset *= TREE_INT_CST_LOW (unit_size);
3239 offset *= BITS_PER_UNIT;
3241 base = TREE_OPERAND (t, 0);
3242 ctor = get_base_constructor (base, &offset, valueize);
3243 /* Empty constructor. Always fold to 0. */
3244 if (ctor == error_mark_node)
3245 return build_zero_cst (TREE_TYPE (t));
3246 /* Out-of-bounds array access. The value is undefined,
3247 but don't fold. */
3248 if (offset < 0)
3249 return NULL_TREE;
3250 /* We cannot determine the ctor. */
3251 if (!ctor)
3252 return NULL_TREE;
3253 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3254 TREE_INT_CST_LOW (unit_size)
3255 * BITS_PER_UNIT);
3258 /* Fallthru. */
3260 case COMPONENT_REF:
3261 case BIT_FIELD_REF:
3262 case TARGET_MEM_REF:
3263 case MEM_REF:
3264 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3265 ctor = get_base_constructor (base, &offset, valueize);
3267 /* Empty constructor. Always fold to 0. */
3268 if (ctor == error_mark_node)
3269 return build_zero_cst (TREE_TYPE (t));
3270 /* We do not know the precise address. */
3271 if (max_size == -1 || max_size != size)
3272 return NULL_TREE;
3273 /* We cannot determine the ctor. */
3274 if (!ctor)
3275 return NULL_TREE;
3277 /* Out-of-bounds array access. The value is undefined, but don't fold. */
3278 if (offset < 0)
3279 return NULL_TREE;
3281 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
3283 case REALPART_EXPR:
3284 case IMAGPART_EXPR:
3286 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3287 if (c && TREE_CODE (c) == COMPLEX_CST)
3288 return fold_build1_loc (EXPR_LOCATION (t),
3289 TREE_CODE (t), TREE_TYPE (t), c);
3290 break;
3293 default:
3294 break;
3297 return NULL_TREE;
3300 tree
3301 fold_const_aggregate_ref (tree t)
3303 return fold_const_aggregate_ref_1 (t, NULL);
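/* E.g. given "static const int a[2] = { 1, 2 };", folding the
   ARRAY_REF a[1] through the functions above yields the INTEGER_CST 2;
   with no valueize callback, SSA indexes are simply left alone.  */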