/* Statement simplification on GIMPLE.
   Copyright (C) 2010 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "target.h"
#include "gimple-fold.h"
/* Return true when DECL can be referenced from the current unit.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to another compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
	In this case we have a static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred to by external vtables that
	we devirtualize only during the final compilation stage.
	At this time we have already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */
static bool
can_refer_decl_in_current_unit_p (tree decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;

  if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
    return true;
  /* External flag is set, so we deal with a C++ reference
     to a static object from another file.  */
  if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
      && TREE_CODE (decl) == VAR_DECL)
    {
      /* Just be sure it is not a bug in the front end setting
	 flags incorrectly.  Those variables should never
	 be finalized.  */
      gcc_checking_assert (!(vnode = varpool_get_node (decl))
			   || !vnode->finalized);
      return false;
    }
  /* When the function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also when still gimplifying all referred comdat functions will be
     produced.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;
  /* If we already output the function body, we are safe.  */
  if (TREE_ASM_WRITTEN (decl))
    return true;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have a function body and that we didn't take
	 the decision to eliminate the offline copy of the function yet.
	 The second is important when devirtualization happens during the
	 final compilation stage, when making a new reference no longer
	 causes the callee to be compiled.  */
      if (!node || !node->analyzed || node->global.inlined_to)
	return false;
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->finalized)
	return false;
    }
  return true;
}
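
/* Illustrative sketch (editor's addition, not from the original source):
   case 1 above arises for C++ code such as

     // unit1.C: key method defined here, so the vtable for S is
     // emitted by this unit.
     struct S { virtual void f (); };
     void S::f () {}

   A second unit analyzing S's vtable initializer sees the FUNCTION_DECL
   for S::f with both TREE_STATIC and DECL_EXTERNAL set, and
   can_refer_decl_in_current_unit_p decides whether folding may create a
   direct reference to it.  */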
/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
   it into an acceptable form for is_gimple_min_invariant.  */

tree
canonicalize_constructor_val (tree cval)
{
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR)
    {
      tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval),
					     TREE_OPERAND (cval, 0),
					     TREE_OPERAND (cval, 1),
					     TREE_TYPE (cval));
      if (t)
	cval = t;
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (cval, 0));

      if (base
	  && (TREE_CODE (base) == VAR_DECL
	      || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base))
	return NULL_TREE;
      if (cfun && base && TREE_CODE (base) == VAR_DECL)
	add_referenced_var (base);
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
    }
  return cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (const_value_known_p (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  val = canonicalize_constructor_val (val);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
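
/* Illustrative sketch (editor's addition): for

     static const int answer = 42;

   get_symbol_constant_value on the decl of 'answer' yields the
   INTEGER_CST 42, and for a 'static const int zero;' with no
   initializer it yields 0, provided const_value_known_p says the
   value cannot be overridden at link or run time.  */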
/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */

bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (TREE_CODE (deref) == MEM_REF
	      && TREE_CODE (addr) == ADDR_EXPR);

  /* Don't propagate if ADDR's operand has incomplete type.  */
  if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
    return false;

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
	  || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
				      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
				     TREE_TYPE (addr))
	  && useless_type_conversion_p (TREE_TYPE (deref),
					TREE_TYPE (TREE_OPERAND (addr, 0))));
}
/* A subroutine of fold_stmt.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.

   LOC is the location of the original expression.  */

static tree
maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of the element type) from that
     ARRAY_REF.  We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = ssizetype;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (!elt_size)
    return NULL;
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
	 to sign-extend the (possibly pointer) offset here
	 and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
				 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
				   soffset.low, soffset.high,
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
	min_idx = TYPE_MIN_VALUE (idx_type);
      else
	min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds.  For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.
     This is only an issue for multi-dimensional arrays.  */
  if (TREE_CODE (elt_type) == ARRAY_TYPE
      && domain_type)
    {
      if (TYPE_MAX_VALUE (domain_type)
	  && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
	  && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
	return NULL_TREE;
      else if (TYPE_MIN_VALUE (domain_type)
	       && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
	       && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
	return NULL_TREE;
      else if (compare_tree_int (idx, 0) < 0)
	return NULL_TREE;
    }

  {
    tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
    SET_EXPR_LOCATION (t, loc);
    return t;
  }
}
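
/* Illustrative sketch (editor's addition): with

     int a[8];

   and 4-byte int, a dereference *(&a[1] + 8) presents BASE = a[1] and
   OFFSET = 8; the offset divides exactly by the element size giving
   index 2, plus the existing element offset 1, so the function returns
   the ARRAY_REF a[3].  A non-exact byte offset such as 6 makes it
   return NULL_TREE instead.  */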
/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
   LOC is the location of the original expression.

   Before attempting the conversion strip off existing ADDR_EXPRs.  */

tree
maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
				tree orig_type)
{
  tree ret;

  STRIP_NOPS (base);
  if (TREE_CODE (base) != ADDR_EXPR)
    return NULL_TREE;

  base = TREE_OPERAND (base, 0);
  if (types_compatible_p (orig_type, TREE_TYPE (base))
      && integer_zerop (offset))
    return base;

  ret = maybe_fold_offset_to_array_ref (loc, base, offset);
  if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
    return ret;
  return NULL_TREE;
}
/* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
   LOC is the location of the original expression.  */

tree
maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
			      tree orig_type)
{
  tree base, ret;

  STRIP_NOPS (addr);
  if (TREE_CODE (addr) != ADDR_EXPR)
    return NULL_TREE;
  base = TREE_OPERAND (addr, 0);
  ret = maybe_fold_offset_to_array_ref (loc, base, offset);
  if (ret)
    {
      ret = build_fold_addr_expr (ret);
      if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
	return NULL_TREE;
      SET_EXPR_LOCATION (ret, loc);
    }

  return ret;
}
/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */
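
/* Illustrative sketch (editor's addition): given

     int a[10];

   the GIMPLE statement  p_1 = &a[0] p+ 8;  reaches this function with
   OP0 = &a[0] and OP1 = 8; with 4-byte int the ARRAY_REF is expanded,
   the byte offset folded in, and the result is &a[2], which may
   propagate further into dereferences of p_1.  */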
tree
maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    {
      /* Or op0 should now be A[0] and the non-constant offset defined
	 via a multiplication by the array element size.  */
      if (TREE_CODE (op0) == ARRAY_REF
	  /* As we will end up creating a variable index array access
	     in the outermost array dimension make sure there isn't
	     a more inner array that the index could overflow to.  */
	  && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
	  && integer_zerop (TREE_OPERAND (op0, 1))
	  && TREE_CODE (op1) == SSA_NAME)
	{
	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
	  tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
	  if (!host_integerp (elsz, 1)
	      || !is_gimple_assign (offset_def))
	    return NULL_TREE;

	  /* Do not build array references of something that we can't
	     see the true number of array dimensions for.  */
	  if (!DECL_P (TREE_OPERAND (op0, 0))
	      && !handled_component_p (TREE_OPERAND (op0, 0)))
	    return NULL_TREE;

	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (op0),
				   TREE_OPERAND (op0, 0),
				   gimple_assign_rhs1 (offset_def),
				   TREE_OPERAND (op0, 2),
				   TREE_OPERAND (op0, 3)));
	  else if (integer_onep (elsz)
		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (op0),
				   TREE_OPERAND (op0, 0),
				   op1,
				   TREE_OPERAND (op0, 2),
				   TREE_OPERAND (op0, 3)));
	}
      else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
	       /* Ditto.  */
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
	       && TREE_CODE (op1) == SSA_NAME)
	{
	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
	  tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
	  if (!host_integerp (elsz, 1)
	      || !is_gimple_assign (offset_def))
	    return NULL_TREE;

	  /* Do not build array references of something that we can't
	     see the true number of array dimensions for.  */
	  if (!DECL_P (op0)
	      && !handled_component_p (op0))
	    return NULL_TREE;

	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
				   op0, gimple_assign_rhs1 (offset_def),
				   integer_zero_node, NULL_TREE));
	  else if (integer_onep (elsz)
		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
				   op0, op1,
				   integer_zero_node, NULL_TREE));
	}

      return NULL_TREE;
    }

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx, 0);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
			     array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (loc, op0, op1);
  if (t)
    {
      t = build_fold_addr_expr (t);
      if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
	return NULL_TREE;
      SET_EXPR_LOCATION (t, loc);
    }

  return t;
}
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}
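
/* Illustrative sketch (editor's addition): after constant propagation a
   statement may contain MEM[(int *)&x + 0], i.e. a *& pair; with
   matching types the MEM_REF is folded back to plain x above.  On a
   rhs (!is_lhs), a load from a constant aggregate such as reading
   c[1] out of 'static const int c[2] = { 3, 7 };' folds to 7 via
   fold_const_aggregate_ref.  */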
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Try to fold a conditional expression.  */
	if (TREE_CODE (rhs) == COND_EXPR)
	  {
	    tree op0 = COND_EXPR_COND (rhs);
	    tree tem;
	    bool set = false;
	    location_t cond_loc = EXPR_LOCATION (rhs);

	    if (COMPARISON_CLASS_P (op0))
	      {
		fold_defer_overflow_warnings ();
		tem = fold_binary_loc (cond_loc,
				       TREE_CODE (op0), TREE_TYPE (op0),
				       TREE_OPERAND (op0, 0),
				       TREE_OPERAND (op0, 1));
		/* This is actually a conditional expression, not a GIMPLE
		   conditional statement, however, the valid_gimple_rhs_p
		   test still applies.  */
		set = (tem && is_gimple_condexpr (tem)
		       && valid_gimple_rhs_p (tem));
		fold_undefer_overflow_warnings (set, stmt, 0);
	      }
	    else if (is_gimple_min_invariant (op0))
	      {
		tem = op0;
		set = true;
	      }
	    else
	      return NULL_TREE;

	    if (set)
	      result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs),
					tem, COND_EXPR_THEN (rhs),
					COND_EXPR_ELSE (rhs));
	  }

	else if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return unshare_expr (get_symbol_constant_value (rhs));

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
	else if (CONVERT_EXPR_CODE_P (subcode)
		 && POINTER_TYPE_P (gimple_expr_type (stmt))
		 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
	  {
	    tree type = gimple_expr_type (stmt);
	    tree t = maybe_fold_offset_to_address (loc,
						   gimple_assign_rhs1 (stmt),
						   integer_zero_node, type);
	    if (t)
	      return t;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	{
	  tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
	    {
	      type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
	      if (!useless_type_conversion_p
		    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
		type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	    }
	  result = maybe_fold_stmt_addition (gimple_location (stmt),
					     type,
					     gimple_assign_rhs1 (stmt),
					     gimple_assign_rhs2 (stmt));
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;

	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
	     we lose canonicalization opportunities.  Do not go again
	     through fold here though, or the same non-GIMPLE will be
	     produced.  */
	  if (commutative_tree_code (subcode)
	      && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				       gimple_assign_rhs2 (stmt), false))
	    return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
			   gimple_assign_rhs2 (stmt),
			   gimple_assign_rhs1 (stmt));
	}
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;

	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
	     we lose canonicalization opportunities.  Do not go again
	     through fold here though, or the same non-GIMPLE will be
	     produced.  */
	  if (commutative_ternary_tree_code (subcode)
	      && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				       gimple_assign_rhs2 (stmt), false))
	    return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
			   gimple_assign_rhs2 (stmt),
			   gimple_assign_rhs1 (stmt),
			   gimple_assign_rhs3 (stmt));
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
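
/* Illustrative sketch (editor's addition): after propagating x_1 = 3
   into  if (x_1 > 2)  the condition becomes  if (3 > 2),  which
   fold_binary_loc reduces to a constant true, and
   gimple_cond_set_condition_from_tree rewrites the GIMPLE_COND
   accordingly; the cfg itself is left for other passes to clean up.  */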
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE; /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc();
  struct gimplify_ctx gctx;
  gimple last = NULL;
  gimple laststore = NULL;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);
  reaching_vuse = gimple_vuse (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_remove (si_p, true);
	  return;
	}
    }
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      if (last)
	{
	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
	  gsi_next (si_p);
	}
      new_stmt = gsi_stmt (i);
      if (gimple_in_ssa_p (cfun))
	{
	  find_new_referenced_vars (new_stmt);
	  mark_symbols_for_renaming (new_stmt);
	}
      /* If the new statement has a VUSE, update it with the exact SSA name
	 we know will reach this one.  */
      if (gimple_vuse (new_stmt))
	{
	  /* If we've also seen a previous store create a new VDEF for
	     the latter one, and make that the new reaching VUSE.  */
	  if (laststore)
	    {
	      reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
	      gimple_set_vdef (laststore, reaching_vuse);
	      update_stmt (laststore);
	      laststore = NULL;
	    }
	  gimple_set_vuse (new_stmt, reaching_vuse);
	  gimple_set_modified (new_stmt, true);
	}
      if (gimple_assign_single_p (new_stmt)
	  && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	{
	  laststore = new_stmt;
	}
      last = new_stmt;
    }

  if (lhs == NULL_TREE)
    {
      /* If we replace a call without LHS that has a VDEF and our new
	 sequence ends with a store we must make that store have the same
	 vdef in order not to break the sequencing.  This can happen
	 for instance when folding memcpy calls into assignments.  */
      if (gimple_vdef (stmt) && laststore)
	{
	  gimple_set_vdef (laststore, gimple_vdef (stmt));
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
	  update_stmt (laststore);
	}
      else if (gimple_in_ssa_p (cfun))
	{
	  unlink_stmt_vdef (stmt);
	  release_defs (stmt);
	}
      new_stmt = last;
    }
  else
    {
      if (last)
	{
	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
	  gsi_next (si_p);
	}
      if (laststore && is_gimple_reg (lhs))
	{
	  gimple_set_vdef (laststore, gimple_vdef (stmt));
	  update_stmt (laststore);
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
	  laststore = NULL;
	}
      else if (laststore)
	{
	  reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
	  gimple_set_vdef (laststore, reaching_vuse);
	  update_stmt (laststore);
	  laststore = NULL;
	}
      new_stmt = gimple_build_assign (lhs, tmp);
      if (!is_gimple_reg (tmp))
	gimple_set_vuse (new_stmt, reaching_vuse);
      if (!is_gimple_reg (lhs))
	{
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
	}
      else if (reaching_vuse == gimple_vuse (stmt))
	unlink_stmt_vdef (stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
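
/* Illustrative sketch (editor's addition): folding a call such as

     memcpy (&a, &b, 4);   with no LHS but carrying a VDEF

   into the assignment  a = b;  goes through this routine: the gimplified
   replacement sequence reuses the call's VUSE for its first load and the
   call's VDEF for its last store, creating fresh SSA names only for
   stores in between, so the virtual operand chain stays intact.  */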
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
	return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
	       && get_maxval_strlen (COND_EXPR_ELSE (arg), length,
				     visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	       && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_maxval_strlen (rhs, length, visited, type);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }
	}
	return true;

      default:
	return false;
    }
}
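
/* Illustrative sketch (editor's addition): for

     p_1 = PHI <"foo"(bb2), "bar"(bb3)>

   get_maxval_strlen (p_1, &len, visited, 0) succeeds with len == 3
   because both PHI arguments have the same constant strlen; with
   "foobar" in one arm it fails for TYPE == 0 (the lengths differ) but
   can still report 6 as the maximum for TYPE == 1.  */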
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	      fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
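
/* Illustrative sketch (editor's addition): for

     char buf[8];
     strcpy (buf, "abc");

   get_maxval_strlen determines the source length 3, and
   fold_builtin_strcpy may rewrite the call to
   memcpy (buf, "abc", 4), copying the terminating NUL as well.  */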
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  If a call to the function must be accompanied
   by a this adjustment, the constant which should be added to the this
   pointer is stored to *DELTA.  If REFUSE_THUNKS is true, return NULL if the
   function is a thunk (other than a this adjustment which is dealt with by
   DELTA).  */

tree
gimple_get_virt_mehtod_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  tree *delta, bool refuse_thunks)
{
  HOST_WIDE_INT i;
  tree v, fndecl;
  struct cgraph_node *node;

  v = BINFO_VIRTUALS (known_binfo);
  /* If there are no virtual methods, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;
  i = 0;
  while (i != token)
    {
      i += (TARGET_VTABLE_USES_DESCRIPTORS
	    ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
      v = TREE_CHAIN (v);
    }

  fndecl = TREE_VALUE (v);
  node = cgraph_get_node_or_alias (fndecl);
  if (refuse_thunks
      && (!node
	  /* Bail out if it is a thunk declaration.  Since simple
	     this_adjusting thunks are represented by a constant in
	     TREE_PURPOSE of items in BINFO_VIRTUALS, this is a more
	     complicated type which we cannot handle as yet.

	     FIXME: Remove the following condition once we are able to
	     represent thunk information on call graph edges.  */
	  || (node->same_body_alias && node->thunk.thunk_p)))
    return NULL_TREE;

  /* When the cgraph node is missing and the function is not public, we
     cannot devirtualize.  This can happen in WHOPR when the actual method
     ends up in another partition, because we found the devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (TREE_VALUE (v)))
    return NULL_TREE;

  *delta = TREE_PURPOSE (v);
  gcc_checking_assert (host_integerp (*delta, 0));
  return fndecl;
}
/* Generate code adjusting the first parameter of a call statement determined
   by GSI by DELTA.  */

void
gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
{
  gimple call_stmt = gsi_stmt (*gsi);
  tree parm, tmp;
  gimple new_stmt;

  delta = fold_convert (sizetype, delta);
  gcc_assert (gimple_call_num_args (call_stmt) >= 1);
  parm = gimple_call_arg (call_stmt, 0);
  gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
  tmp = create_tmp_var (TREE_TYPE (parm), NULL);
  add_referenced_var (tmp);

  tmp = make_ssa_name (tmp, NULL);
  new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
  SSA_NAME_DEF_STMT (tmp) = new_stmt;
  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
  gimple_call_set_arg (call_stmt, 0, tmp);
}
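
/* Illustrative sketch (editor's addition): for a devirtualized call
   whose this adjustment DELTA is 8, this inserts, before the call,

     tmp_1 = this_2 p+ 8;

   and rewrites the call to pass tmp_1 as its first argument.  */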
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (!inplace && callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);

      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  return true;
	}
    }
  return false;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	tree lhs = gimple_assign_lhs (stmt);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      /* Fold *& in call arguments.  */
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
	  {
	    tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	    if (tmp)
	      {
		gimple_call_set_arg (stmt, i, tmp);
		changed = true;
	      }
	  }
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	{
	  tree link = gimple_asm_output_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, true)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	{
	  tree link = gimple_asm_input_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, false)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}
/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement STMT should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  bool changed = fold_stmt_1 (&gsi, true);
  gcc_assert (gsi_stmt (gsi) == stmt);
  return changed;
}
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  else if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
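
/* Illustrative sketch (editor's addition): canonicalize_bool turns an
   SSA name x_1 into  x_1 != 0  (or  x_1 == 0  when INVERT is true), and
   with INVERT it flips a comparison such as  a_2 < b_3  into
   a_2 >= b_3, always producing a result of boolean type or
   NULL_TREE.  */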
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
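
/* Illustrative sketch (editor's addition) of the DeMorgan rewrite above:
   simplifying  !x_1 AND (a_2 == b_3)  proceeds by computing
   x_1 OR (a_2 != b_3)  via or_var_with_comparison_1 and then inverting
   the canonicalized result, so only the non-inverted cases need direct
   handling.  */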
1798 /* Try to simplify the AND of the ssa variable defined by the assignment
1799 STMT with the comparison specified by (OP2A CODE2 OP2B).
1800 Return NULL_EXPR if we can't simplify this to a single expression. */
1802 static tree
1803 and_var_with_comparison_1 (gimple stmt,
1804 enum tree_code code2, tree op2a, tree op2b)
1806 tree var = gimple_assign_lhs (stmt);
1807 tree true_test_var = NULL_TREE;
1808 tree false_test_var = NULL_TREE;
1809 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1811 /* Check for identities like (var AND (var == 0)) => false. */
1812 if (TREE_CODE (op2a) == SSA_NAME
1813 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1815 if ((code2 == NE_EXPR && integer_zerop (op2b))
1816 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1818 true_test_var = op2a;
1819 if (var == true_test_var)
1820 return var;
1822 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1823 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1825 false_test_var = op2a;
1826 if (var == false_test_var)
1827 return boolean_false_node;
1831 /* If the definition is a comparison, recurse on it. */
1832 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1834 tree t = and_comparisons_1 (innercode,
1835 gimple_assign_rhs1 (stmt),
1836 gimple_assign_rhs2 (stmt),
1837 code2,
1838 op2a,
1839 op2b);
1840 if (t)
1841 return t;
1844 /* If the definition is an AND or OR expression, we may be able to
1845 simplify by reassociating. */
1846 if (innercode == TRUTH_AND_EXPR
1847 || innercode == TRUTH_OR_EXPR
1848 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1849 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
1851 tree inner1 = gimple_assign_rhs1 (stmt);
1852 tree inner2 = gimple_assign_rhs2 (stmt);
1853 gimple s;
1854 tree t;
1855 tree partial = NULL_TREE;
1856 bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);
1858 /* Check for boolean identities that don't require recursive examination
1859 of inner1/inner2:
1860 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1861 inner1 AND (inner1 OR inner2) => inner1
1862 !inner1 AND (inner1 AND inner2) => false
1863 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1864 Likewise for similar cases involving inner2. */
1865 if (inner1 == true_test_var)
1866 return (is_and ? var : inner1);
1867 else if (inner2 == true_test_var)
1868 return (is_and ? var : inner2);
1869 else if (inner1 == false_test_var)
1870 return (is_and
1871 ? boolean_false_node
1872 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1873 else if (inner2 == false_test_var)
1874 return (is_and
1875 ? boolean_false_node
1876 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1878 /* Next, redistribute/reassociate the AND across the inner tests.
1879 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1880 if (TREE_CODE (inner1) == SSA_NAME
1881 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1882 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1883 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1884 gimple_assign_rhs1 (s),
1885 gimple_assign_rhs2 (s),
1886 code2, op2a, op2b)))
1888 /* Handle the AND case, where we are reassociating:
1889 (inner1 AND inner2) AND (op2a code2 op2b)
1890 => (t AND inner2)
1891 If the partial result t is a constant, we win. Otherwise
1892 continue on to try reassociating with the other inner test. */
1893 if (is_and)
1895 if (integer_onep (t))
1896 return inner2;
1897 else if (integer_zerop (t))
1898 return boolean_false_node;
1901 /* Handle the OR case, where we are redistributing:
1902 (inner1 OR inner2) AND (op2a code2 op2b)
1903 => (t OR (inner2 AND (op2a code2 op2b))) */
1904 else if (integer_onep (t))
1905 return boolean_true_node;
1907 /* Save partial result for later. */
1908 partial = t;
1911 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1912 if (TREE_CODE (inner2) == SSA_NAME
1913 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1914 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1915 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1916 gimple_assign_rhs1 (s),
1917 gimple_assign_rhs2 (s),
1918 code2, op2a, op2b)))
1920 /* Handle the AND case, where we are reassociating:
1921 (inner1 AND inner2) AND (op2a code2 op2b)
1922 => (inner1 AND t) */
1923 if (is_and)
1925 if (integer_onep (t))
1926 return inner1;
1927 else if (integer_zerop (t))
1928 return boolean_false_node;
1929 /* If both are the same, we can apply the identity
1930 (x AND x) == x. */
1931 else if (partial && same_bool_result_p (t, partial))
1932 return t;
1935 /* Handle the OR case, where we are redistributing:
1936 (inner1 OR inner2) AND (op2a code2 op2b)
1937 => (t OR (inner1 AND (op2a code2 op2b)))
1938 => (t OR partial) */
1939 else
1941 if (integer_onep (t))
1942 return boolean_true_node;
1943 else if (partial)
1945 /* We already got a simplification for the other
1946 operand to the redistributed OR expression. The
1947 interesting case is when at least one is false.
1948 Or, if both are the same, we can apply the identity
1949 (x OR x) == x. */
1950 if (integer_zerop (partial))
1951 return t;
1952 else if (integer_zerop (t))
1953 return partial;
1954 else if (same_bool_result_p (t, partial))
1955 return t;
1960 return NULL_TREE;
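/* For example (illustrative, with made-up SSA names): given
   var_5 = (x_1 < 10) & (x_1 > 20) ANDed with (x_1 < 5), the first
   partial result is (x_1 < 5) (the more restrictive lower test),
   saved in PARTIAL; reassociating with the second inner test gives
   (x_1 > 20) AND (x_1 < 5), a disjoint range, so T is false and
   the whole expression folds to boolean_false_node.  */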
1963 /* Try to simplify the AND of two comparisons defined by
1964 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1965 If this can be done without constructing an intermediate value,
1966 return the resulting tree; otherwise NULL_TREE is returned.
1967 This function is deliberately asymmetric as it recurses on SSA_DEFs
1968 in the first comparison but not the second. */
1970 static tree
1971 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1972 enum tree_code code2, tree op2a, tree op2b)
1974 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1975 if (operand_equal_p (op1a, op2a, 0)
1976 && operand_equal_p (op1b, op2b, 0))
1978 tree t = combine_comparisons (UNKNOWN_LOCATION,
1979 TRUTH_ANDIF_EXPR, code1, code2,
1980 boolean_type_node, op1a, op1b);
1981 if (t)
1982 return t;
1985 /* Likewise the swapped case of the above. */
1986 if (operand_equal_p (op1a, op2b, 0)
1987 && operand_equal_p (op1b, op2a, 0))
1989 tree t = combine_comparisons (UNKNOWN_LOCATION,
1990 TRUTH_ANDIF_EXPR, code1,
1991 swap_tree_comparison (code2),
1992 boolean_type_node, op1a, op1b);
1993 if (t)
1994 return t;
1997 /* If both comparisons are of the same value against constants, we might
1998 be able to merge them. */
1999 if (operand_equal_p (op1a, op2a, 0)
2000 && TREE_CODE (op1b) == INTEGER_CST
2001 && TREE_CODE (op2b) == INTEGER_CST)
2003 int cmp = tree_int_cst_compare (op1b, op2b);
2005 /* If we have (op1a == op1b), we should either be able to
2006 return that or FALSE, depending on whether the constant op1b
2007 also satisfies the other comparison against op2b. */
2008 if (code1 == EQ_EXPR)
2010 bool done = true;
2011 bool val;
2012 switch (code2)
2014 case EQ_EXPR: val = (cmp == 0); break;
2015 case NE_EXPR: val = (cmp != 0); break;
2016 case LT_EXPR: val = (cmp < 0); break;
2017 case GT_EXPR: val = (cmp > 0); break;
2018 case LE_EXPR: val = (cmp <= 0); break;
2019 case GE_EXPR: val = (cmp >= 0); break;
2020 default: done = false;
2022 if (done)
2024 if (val)
2025 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2026 else
2027 return boolean_false_node;
2030 /* Likewise if the second comparison is an == comparison. */
2031 else if (code2 == EQ_EXPR)
2033 bool done = true;
2034 bool val;
2035 switch (code1)
2037 case EQ_EXPR: val = (cmp == 0); break;
2038 case NE_EXPR: val = (cmp != 0); break;
2039 case LT_EXPR: val = (cmp > 0); break;
2040 case GT_EXPR: val = (cmp < 0); break;
2041 case LE_EXPR: val = (cmp >= 0); break;
2042 case GE_EXPR: val = (cmp <= 0); break;
2043 default: done = false;
2045 if (done)
2047 if (val)
2048 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2049 else
2050 return boolean_false_node;
2054 /* Same business with inequality tests. */
2055 else if (code1 == NE_EXPR)
2057 bool val;
2058 switch (code2)
2060 case EQ_EXPR: val = (cmp != 0); break;
2061 case NE_EXPR: val = (cmp == 0); break;
2062 case LT_EXPR: val = (cmp >= 0); break;
2063 case GT_EXPR: val = (cmp <= 0); break;
2064 case LE_EXPR: val = (cmp > 0); break;
2065 case GE_EXPR: val = (cmp < 0); break;
2066 default:
2067 val = false;
2069 if (val)
2070 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2072 else if (code2 == NE_EXPR)
2074 bool val;
2075 switch (code1)
2077 case EQ_EXPR: val = (cmp == 0); break;
2078 case NE_EXPR: val = (cmp != 0); break;
2079 case LT_EXPR: val = (cmp <= 0); break;
2080 case GT_EXPR: val = (cmp >= 0); break;
2081 case LE_EXPR: val = (cmp < 0); break;
2082 case GE_EXPR: val = (cmp > 0); break;
2083 default:
2084 val = false;
2086 if (val)
2087 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2090 /* Choose the more restrictive of two < or <= comparisons. */
2091 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2092 && (code2 == LT_EXPR || code2 == LE_EXPR))
2094 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2095 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2096 else
2097 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2100 /* Likewise choose the more restrictive of two > or >= comparisons. */
2101 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2102 && (code2 == GT_EXPR || code2 == GE_EXPR))
2104 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2105 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2106 else
2107 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2110 /* Check for singleton ranges. */
2111 else if (cmp == 0
2112 && ((code1 == LE_EXPR && code2 == GE_EXPR)
2113 || (code1 == GE_EXPR && code2 == LE_EXPR)))
2114 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
2116 /* Check for disjoint ranges. */
2117 else if (cmp <= 0
2118 && (code1 == LT_EXPR || code1 == LE_EXPR)
2119 && (code2 == GT_EXPR || code2 == GE_EXPR))
2120 return boolean_false_node;
2121 else if (cmp >= 0
2122 && (code1 == GT_EXPR || code1 == GE_EXPR)
2123 && (code2 == LT_EXPR || code2 == LE_EXPR))
2124 return boolean_false_node;
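/* Worked instances of the constant cases above (illustrative),
   with cmp = tree_int_cst_compare (op1b, op2b):
     (x == 3) AND (x < 7)    cmp < 0, val is true  =>  x == 3
     (x <= 5) AND (x >= 5)   singleton range       =>  x == 5
     (x < 3)  AND (x >= 7)   disjoint ranges       =>  false  */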
2127 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2128 NAME's definition is a truth value. See if there are any simplifications
2129 that can be done against the NAME's definition. */
2130 if (TREE_CODE (op1a) == SSA_NAME
2131 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2132 && (integer_zerop (op1b) || integer_onep (op1b)))
2134 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2135 || (code1 == NE_EXPR && integer_onep (op1b)));
2136 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2137 switch (gimple_code (stmt))
2139 case GIMPLE_ASSIGN:
2140 /* Try to simplify by copy-propagating the definition. */
2141 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
2143 case GIMPLE_PHI:
2144 /* If every argument to the PHI produces the same result when
2145 ANDed with the second comparison, we win.
2146 Do not do this unless the type is bool since we need a bool
2147 result here anyway. */
2148 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2150 tree result = NULL_TREE;
2151 unsigned i;
2152 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2154 tree arg = gimple_phi_arg_def (stmt, i);
2156 /* If this PHI has itself as an argument, ignore it.
2157 If all the other args produce the same result,
2158 we're still OK. */
2159 if (arg == gimple_phi_result (stmt))
2160 continue;
2161 else if (TREE_CODE (arg) == INTEGER_CST)
2163 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
2165 if (!result)
2166 result = boolean_false_node;
2167 else if (!integer_zerop (result))
2168 return NULL_TREE;
2170 else if (!result)
2171 result = fold_build2 (code2, boolean_type_node,
2172 op2a, op2b);
2173 else if (!same_bool_comparison_p (result,
2174 code2, op2a, op2b))
2175 return NULL_TREE;
2177 else if (TREE_CODE (arg) == SSA_NAME)
2179 tree temp = and_var_with_comparison (arg, invert,
2180 code2, op2a, op2b);
2181 if (!temp)
2182 return NULL_TREE;
2183 else if (!result)
2184 result = temp;
2185 else if (!same_bool_result_p (result, temp))
2186 return NULL_TREE;
2188 else
2189 return NULL_TREE;
2191 return result;
2194 default:
2195 break;
2198 return NULL_TREE;
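/* For example (illustrative, with made-up SSA names): if
   x_1 = PHI <0, y_2> and we AND (x_1 != 0) with (a_3 < b_4), the
   constant argument 0 forces a false result on its edge, so the
   PHI case above succeeds only if y_2's contribution also
   simplifies to false; any other outcome returns NULL_TREE.  */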
2201 /* Try to simplify the AND of two comparisons, specified by
2202 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2203 If this can be simplified to a single expression (without requiring
2204 introducing more SSA variables to hold intermediate values),
2205 return the resulting tree. Otherwise return NULL_TREE.
2206 If the result expression is non-null, it has boolean type. */
2208 tree
2209 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2210 enum tree_code code2, tree op2a, tree op2b)
2212 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2213 if (t)
2214 return t;
2215 else
2216 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
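/* Note that and_comparisons_1 only follows SSA definitions reached
   through its first comparison, so the second call above retries
   with the operands swapped.  For example (illustrative, with
   made-up SSA names): in (a_1 < b_2) AND (x_3 != 0) only x_3 has
   an interesting definition, so the first call may fail while
   and_comparisons_1 (NE_EXPR, x_3, 0, LT_EXPR, a_1, b_2) can
   still simplify.  */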
2219 /* Helper function for or_comparisons_1: try to simplify the OR of the
2220 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2221 If INVERT is true, invert the value of VAR before doing the OR.
2222 Return NULL_TREE if we can't simplify this to a single expression. */
2224 static tree
2225 or_var_with_comparison (tree var, bool invert,
2226 enum tree_code code2, tree op2a, tree op2b)
2228 tree t;
2229 gimple stmt = SSA_NAME_DEF_STMT (var);
2231 /* We can only deal with variables whose definitions are assignments. */
2232 if (!is_gimple_assign (stmt))
2233 return NULL_TREE;
2235 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2236 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2237 Then we only have to consider the simpler non-inverted cases. */
2238 if (invert)
2239 t = and_var_with_comparison_1 (stmt,
2240 invert_tree_comparison (code2, false),
2241 op2a, op2b);
2242 else
2243 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2244 return canonicalize_bool (t, invert);
2247 /* Try to simplify the OR of the ssa variable defined by the assignment
2248 STMT with the comparison specified by (OP2A CODE2 OP2B).
2249 Return NULL_TREE if we can't simplify this to a single expression. */
2251 static tree
2252 or_var_with_comparison_1 (gimple stmt,
2253 enum tree_code code2, tree op2a, tree op2b)
2255 tree var = gimple_assign_lhs (stmt);
2256 tree true_test_var = NULL_TREE;
2257 tree false_test_var = NULL_TREE;
2258 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2260 /* Check for identities like (var OR (var != 0)) => true. */
2261 if (TREE_CODE (op2a) == SSA_NAME
2262 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2264 if ((code2 == NE_EXPR && integer_zerop (op2b))
2265 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2267 true_test_var = op2a;
2268 if (var == true_test_var)
2269 return var;
2271 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2272 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2274 false_test_var = op2a;
2275 if (var == false_test_var)
2276 return boolean_true_node;
2280 /* If the definition is a comparison, recurse on it. */
2281 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2283 tree t = or_comparisons_1 (innercode,
2284 gimple_assign_rhs1 (stmt),
2285 gimple_assign_rhs2 (stmt),
2286 code2,
2287 op2a,
2288 op2b);
2289 if (t)
2290 return t;
2293 /* If the definition is an AND or OR expression, we may be able to
2294 simplify by reassociating. */
2295 if (innercode == TRUTH_AND_EXPR
2296 || innercode == TRUTH_OR_EXPR
2297 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2298 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
2300 tree inner1 = gimple_assign_rhs1 (stmt);
2301 tree inner2 = gimple_assign_rhs2 (stmt);
2302 gimple s;
2303 tree t;
2304 tree partial = NULL_TREE;
2305 bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
2307 /* Check for boolean identities that don't require recursive examination
2308 of inner1/inner2:
2309 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2310 inner1 OR (inner1 AND inner2) => inner1
2311 !inner1 OR (inner1 OR inner2) => true
2312 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2 */
2314 if (inner1 == true_test_var)
2315 return (is_or ? var : inner1);
2316 else if (inner2 == true_test_var)
2317 return (is_or ? var : inner2);
2318 else if (inner1 == false_test_var)
2319 return (is_or
2320 ? boolean_true_node
2321 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2322 else if (inner2 == false_test_var)
2323 return (is_or
2324 ? boolean_true_node
2325 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2327 /* Next, redistribute/reassociate the OR across the inner tests.
2328 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2329 if (TREE_CODE (inner1) == SSA_NAME
2330 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2331 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2332 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2333 gimple_assign_rhs1 (s),
2334 gimple_assign_rhs2 (s),
2335 code2, op2a, op2b)))
2337 /* Handle the OR case, where we are reassociating:
2338 (inner1 OR inner2) OR (op2a code2 op2b)
2339 => (t OR inner2)
2340 If the partial result t is a constant, we win. Otherwise
2341 continue on to try reassociating with the other inner test. */
2342 if (is_or)
2344 if (integer_onep (t))
2345 return boolean_true_node;
2346 else if (integer_zerop (t))
2347 return inner2;
2350 /* Handle the AND case, where we are redistributing:
2351 (inner1 AND inner2) OR (op2a code2 op2b)
2352 => (t AND (inner2 OR (op2a code op2b))) */
2353 else if (integer_zerop (t))
2354 return boolean_false_node;
2356 /* Save partial result for later. */
2357 partial = t;
2360 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2361 if (TREE_CODE (inner2) == SSA_NAME
2362 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2363 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2364 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2365 gimple_assign_rhs1 (s),
2366 gimple_assign_rhs2 (s),
2367 code2, op2a, op2b)))
2369 /* Handle the OR case, where we are reassociating:
2370 (inner1 OR inner2) OR (op2a code2 op2b)
2371 => (inner1 OR t)
2372 => (t OR partial) */
2373 if (is_or)
2375 if (integer_zerop (t))
2376 return inner1;
2377 else if (integer_onep (t))
2378 return boolean_true_node;
2379 /* If both are the same, we can apply the identity
2380 (x OR x) == x. */
2381 else if (partial && same_bool_result_p (t, partial))
2382 return t;
2385 /* Handle the AND case, where we are redistributing:
2386 (inner1 AND inner2) OR (op2a code2 op2b)
2387 => (t AND (inner1 OR (op2a code2 op2b)))
2388 => (t AND partial) */
2389 else
2391 if (integer_zerop (t))
2392 return boolean_false_node;
2393 else if (partial)
2395 /* We already got a simplification for the other
2396 operand to the redistributed AND expression. The
2397 interesting case is when at least one is true.
2398 Or, if both are the same, we can apply the identity
2399 (x AND x) == x. */
2400 if (integer_onep (partial))
2401 return t;
2402 else if (integer_onep (t))
2403 return partial;
2404 else if (same_bool_result_p (t, partial))
2405 return t;
2410 return NULL_TREE;
2413 /* Try to simplify the OR of two comparisons defined by
2414 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2415 If this can be done without constructing an intermediate value,
2416 return the resulting tree; otherwise NULL_TREE is returned.
2417 This function is deliberately asymmetric as it recurses on SSA_DEFs
2418 in the first comparison but not the second. */
2420 static tree
2421 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2422 enum tree_code code2, tree op2a, tree op2b)
2424 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2425 if (operand_equal_p (op1a, op2a, 0)
2426 && operand_equal_p (op1b, op2b, 0))
2428 tree t = combine_comparisons (UNKNOWN_LOCATION,
2429 TRUTH_ORIF_EXPR, code1, code2,
2430 boolean_type_node, op1a, op1b);
2431 if (t)
2432 return t;
2435 /* Likewise the swapped case of the above. */
2436 if (operand_equal_p (op1a, op2b, 0)
2437 && operand_equal_p (op1b, op2a, 0))
2439 tree t = combine_comparisons (UNKNOWN_LOCATION,
2440 TRUTH_ORIF_EXPR, code1,
2441 swap_tree_comparison (code2),
2442 boolean_type_node, op1a, op1b);
2443 if (t)
2444 return t;
2447 /* If both comparisons are of the same value against constants, we might
2448 be able to merge them. */
2449 if (operand_equal_p (op1a, op2a, 0)
2450 && TREE_CODE (op1b) == INTEGER_CST
2451 && TREE_CODE (op2b) == INTEGER_CST)
2453 int cmp = tree_int_cst_compare (op1b, op2b);
2455 /* If we have (op1a != op1b), we should either be able to
2456 return that or TRUE, depending on whether the constant op1b
2457 also satisfies the other comparison against op2b. */
2458 if (code1 == NE_EXPR)
2460 bool done = true;
2461 bool val;
2462 switch (code2)
2464 case EQ_EXPR: val = (cmp == 0); break;
2465 case NE_EXPR: val = (cmp != 0); break;
2466 case LT_EXPR: val = (cmp < 0); break;
2467 case GT_EXPR: val = (cmp > 0); break;
2468 case LE_EXPR: val = (cmp <= 0); break;
2469 case GE_EXPR: val = (cmp >= 0); break;
2470 default: done = false;
2472 if (done)
2474 if (val)
2475 return boolean_true_node;
2476 else
2477 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2480 /* Likewise if the second comparison is a != comparison. */
2481 else if (code2 == NE_EXPR)
2483 bool done = true;
2484 bool val;
2485 switch (code1)
2487 case EQ_EXPR: val = (cmp == 0); break;
2488 case NE_EXPR: val = (cmp != 0); break;
2489 case LT_EXPR: val = (cmp > 0); break;
2490 case GT_EXPR: val = (cmp < 0); break;
2491 case LE_EXPR: val = (cmp >= 0); break;
2492 case GE_EXPR: val = (cmp <= 0); break;
2493 default: done = false;
2495 if (done)
2497 if (val)
2498 return boolean_true_node;
2499 else
2500 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2504 /* See if an equality test is redundant with the other comparison. */
2505 else if (code1 == EQ_EXPR)
2507 bool val;
2508 switch (code2)
2510 case EQ_EXPR: val = (cmp == 0); break;
2511 case NE_EXPR: val = (cmp != 0); break;
2512 case LT_EXPR: val = (cmp < 0); break;
2513 case GT_EXPR: val = (cmp > 0); break;
2514 case LE_EXPR: val = (cmp <= 0); break;
2515 case GE_EXPR: val = (cmp >= 0); break;
2516 default:
2517 val = false;
2519 if (val)
2520 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2522 else if (code2 == EQ_EXPR)
2524 bool val;
2525 switch (code1)
2527 case EQ_EXPR: val = (cmp == 0); break;
2528 case NE_EXPR: val = (cmp != 0); break;
2529 case LT_EXPR: val = (cmp > 0); break;
2530 case GT_EXPR: val = (cmp < 0); break;
2531 case LE_EXPR: val = (cmp >= 0); break;
2532 case GE_EXPR: val = (cmp <= 0); break;
2533 default:
2534 val = false;
2536 if (val)
2537 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2540 /* Choose the less restrictive of two < or <= comparisons. */
2541 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2542 && (code2 == LT_EXPR || code2 == LE_EXPR))
2544 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2545 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2546 else
2547 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2550 /* Likewise choose the less restrictive of two > or >= comparisons. */
2551 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2552 && (code2 == GT_EXPR || code2 == GE_EXPR))
2554 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2555 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2556 else
2557 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2560 /* Check for singleton ranges. */
2561 else if (cmp == 0
2562 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2563 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2564 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2566 /* Check for less/greater pairs that don't restrict the range at all. */
2567 else if (cmp >= 0
2568 && (code1 == LT_EXPR || code1 == LE_EXPR)
2569 && (code2 == GT_EXPR || code2 == GE_EXPR))
2570 return boolean_true_node;
2571 else if (cmp <= 0
2572 && (code1 == GT_EXPR || code1 == GE_EXPR)
2573 && (code2 == LT_EXPR || code2 == LE_EXPR))
2574 return boolean_true_node;
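/* Worked instances of the constant cases above (illustrative),
   with cmp = tree_int_cst_compare (op1b, op2b):
     (x != 3) OR (x < 7)    cmp < 0, val is true  =>  true
     (x < 5)  OR (x > 5)    singleton complement  =>  x != 5
     (x <= 7) OR (x >= 3)   unrestricted range    =>  true  */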
2577 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2578 NAME's definition is a truth value. See if there are any simplifications
2579 that can be done against the NAME's definition. */
2580 if (TREE_CODE (op1a) == SSA_NAME
2581 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2582 && (integer_zerop (op1b) || integer_onep (op1b)))
2584 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2585 || (code1 == NE_EXPR && integer_onep (op1b)));
2586 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2587 switch (gimple_code (stmt))
2589 case GIMPLE_ASSIGN:
2590 /* Try to simplify by copy-propagating the definition. */
2591 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2593 case GIMPLE_PHI:
2594 /* If every argument to the PHI produces the same result when
2595 ORed with the second comparison, we win.
2596 Do not do this unless the type is bool since we need a bool
2597 result here anyway. */
2598 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2600 tree result = NULL_TREE;
2601 unsigned i;
2602 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2604 tree arg = gimple_phi_arg_def (stmt, i);
2606 /* If this PHI has itself as an argument, ignore it.
2607 If all the other args produce the same result,
2608 we're still OK. */
2609 if (arg == gimple_phi_result (stmt))
2610 continue;
2611 else if (TREE_CODE (arg) == INTEGER_CST)
2613 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2615 if (!result)
2616 result = boolean_true_node;
2617 else if (!integer_onep (result))
2618 return NULL_TREE;
2620 else if (!result)
2621 result = fold_build2 (code2, boolean_type_node,
2622 op2a, op2b);
2623 else if (!same_bool_comparison_p (result,
2624 code2, op2a, op2b))
2625 return NULL_TREE;
2627 else if (TREE_CODE (arg) == SSA_NAME)
2629 tree temp = or_var_with_comparison (arg, invert,
2630 code2, op2a, op2b);
2631 if (!temp)
2632 return NULL_TREE;
2633 else if (!result)
2634 result = temp;
2635 else if (!same_bool_result_p (result, temp))
2636 return NULL_TREE;
2638 else
2639 return NULL_TREE;
2641 return result;
2644 default:
2645 break;
2648 return NULL_TREE;
2651 /* Try to simplify the OR of two comparisons, specified by
2652 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2653 If this can be simplified to a single expression (without requiring
2654 introducing more SSA variables to hold intermediate values),
2655 return the resulting tree. Otherwise return NULL_TREE.
2656 If the result expression is non-null, it has boolean type. */
2658 tree
2659 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2660 enum tree_code code2, tree op2a, tree op2b)
2662 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2663 if (t)
2664 return t;
2665 else
2666 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2670 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2672 Either NULL_TREE, a simplified but non-constant expression,
2673 or a constant is returned.
2675 ??? This should go into a gimple-fold-inline.h file to be eventually
2676 privatized with the single valueize function used in the various TUs
2677 to avoid the indirect function call overhead. */
2679 tree
2680 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2682 location_t loc = gimple_location (stmt);
2683 switch (gimple_code (stmt))
2685 case GIMPLE_ASSIGN:
2687 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2689 switch (get_gimple_rhs_class (subcode))
2691 case GIMPLE_SINGLE_RHS:
2693 tree rhs = gimple_assign_rhs1 (stmt);
2694 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2696 if (TREE_CODE (rhs) == SSA_NAME)
2698 /* If the RHS is an SSA_NAME, return its known constant value,
2699 if any. */
2700 return (*valueize) (rhs);
2702 /* Handle propagating invariant addresses into address
2703 operations. */
2704 else if (TREE_CODE (rhs) == ADDR_EXPR
2705 && !is_gimple_min_invariant (rhs))
2707 HOST_WIDE_INT offset;
2708 tree base;
2709 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2710 &offset,
2711 valueize);
2712 if (base
2713 && (CONSTANT_CLASS_P (base)
2714 || decl_address_invariant_p (base)))
2715 return build_invariant_address (TREE_TYPE (rhs),
2716 base, offset);
2718 else if (TREE_CODE (rhs) == CONSTRUCTOR
2719 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2720 && (CONSTRUCTOR_NELTS (rhs)
2721 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2723 unsigned i;
2724 tree val, list;
2726 list = NULL_TREE;
2727 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2729 val = (*valueize) (val);
2730 if (TREE_CODE (val) == INTEGER_CST
2731 || TREE_CODE (val) == REAL_CST
2732 || TREE_CODE (val) == FIXED_CST)
2733 list = tree_cons (NULL_TREE, val, list);
2734 else
2735 return NULL_TREE;
2738 return build_vector (TREE_TYPE (rhs), nreverse (list));
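/* For example (illustrative, with made-up SSA names): for
   v_3 = {x_1, y_2} of vector type, with VALUEIZE mapping x_1 to 1
   and y_2 to 2, the loop above collects the elements in order and
   build_vector returns the constant vector { 1, 2 }.  */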
2741 if (kind == tcc_reference)
2743 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2744 || TREE_CODE (rhs) == REALPART_EXPR
2745 || TREE_CODE (rhs) == IMAGPART_EXPR)
2746 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2748 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2749 return fold_unary_loc (EXPR_LOCATION (rhs),
2750 TREE_CODE (rhs),
2751 TREE_TYPE (rhs), val);
2753 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2754 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2756 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2757 return fold_ternary_loc (EXPR_LOCATION (rhs),
2758 TREE_CODE (rhs),
2759 TREE_TYPE (rhs), val,
2760 TREE_OPERAND (rhs, 1),
2761 TREE_OPERAND (rhs, 2));
2763 else if (TREE_CODE (rhs) == MEM_REF
2764 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2766 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2767 if (TREE_CODE (val) == ADDR_EXPR
2768 && is_gimple_min_invariant (val))
2770 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2771 unshare_expr (val),
2772 TREE_OPERAND (rhs, 1));
2773 if (tem)
2774 rhs = tem;
2777 return fold_const_aggregate_ref_1 (rhs, valueize);
2779 else if (kind == tcc_declaration)
2780 return get_symbol_constant_value (rhs);
2781 return rhs;
2784 case GIMPLE_UNARY_RHS:
2786 /* Handle unary operators that can appear in GIMPLE form.
2787 Note that we know the single operand must be a constant,
2788 so this should almost always return a simplified RHS. */
2789 tree lhs = gimple_assign_lhs (stmt);
2790 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2792 /* Conversions are useless for CCP purposes if they are
2793 value-preserving. Thus the restrictions that
2794 useless_type_conversion_p places for pointer type conversions
2795 do not apply here. Substitution later will only substitute to
2796 allowed places. */
2797 if (CONVERT_EXPR_CODE_P (subcode)
2798 && POINTER_TYPE_P (TREE_TYPE (lhs))
2799 && POINTER_TYPE_P (TREE_TYPE (op0)))
2801 tree tem;
2802 /* Try to re-construct array references on-the-fly. */
2803 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2804 TREE_TYPE (op0))
2805 && ((tem = maybe_fold_offset_to_address
2806 (loc,
2807 op0, integer_zero_node, TREE_TYPE (lhs)))
2808 != NULL_TREE))
2809 return tem;
2810 return op0;
2813 return
2814 fold_unary_ignore_overflow_loc (loc, subcode,
2815 gimple_expr_type (stmt), op0);
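/* For example (illustrative, with made-up SSA names): for
   p_2 = (int *) q_1 with VALUEIZE mapping q_1 to &a, the
   pointer-conversion branch above returns &a itself, since for
   CCP purposes the conversion preserves the value.  */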
2818 case GIMPLE_BINARY_RHS:
2820 /* Handle binary operators that can appear in GIMPLE form. */
2821 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2822 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2824 /* Translate &x + CST into an invariant form suitable for
2825 further propagation. */
2826 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2827 && TREE_CODE (op0) == ADDR_EXPR
2828 && TREE_CODE (op1) == INTEGER_CST)
2830 tree off = fold_convert (ptr_type_node, op1);
2831 return build_fold_addr_expr
2832 (fold_build2 (MEM_REF,
2833 TREE_TYPE (TREE_TYPE (op0)),
2834 unshare_expr (op0), off));
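/* For example (illustrative): with op0 == &a and op1 == 4, the
   result is the address of a MEM_REF based on &a with a constant
   4-byte offset; that ADDR_EXPR is invariant and can be
   propagated further.  */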
2837 return fold_binary_loc (loc, subcode,
2838 gimple_expr_type (stmt), op0, op1);
2841 case GIMPLE_TERNARY_RHS:
2843 /* Handle ternary operators that can appear in GIMPLE form. */
2844 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2845 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2846 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2848 return fold_ternary_loc (loc, subcode,
2849 gimple_expr_type (stmt), op0, op1, op2);
2852 default:
2853 gcc_unreachable ();
2857 case GIMPLE_CALL:
2859 tree fn = (*valueize) (gimple_call_fn (stmt));
2860 if (TREE_CODE (fn) == ADDR_EXPR
2861 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2862 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2864 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2865 tree call, retval;
2866 unsigned i;
2867 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2868 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2869 call = build_call_array_loc (loc,
2870 gimple_call_return_type (stmt),
2871 fn, gimple_call_num_args (stmt), args);
2872 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2873 if (retval)
2874 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2875 STRIP_NOPS (retval);
2876 return retval;
2878 return NULL_TREE;
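/* For example (illustrative, with made-up SSA names): for
   t_1 = __builtin_popcount (x_2) with VALUEIZE mapping x_2 to 5,
   the code above rebuilds a CALL_EXPR with the constant argument
   and fold_call_expr reduces it to the constant 2.  */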
2881 default:
2882 return NULL_TREE;
2886 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2887 Returns NULL_TREE if folding to a constant is not possible, otherwise
2888 returns a constant according to is_gimple_min_invariant. */
2890 tree
2891 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2893 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2894 if (res && is_gimple_min_invariant (res))
2895 return res;
2896 return NULL_TREE;
2900 /* The following set of functions folds references using their
2901 constant initializers. */
2903 static tree fold_ctor_reference (tree type, tree ctor,
2904 unsigned HOST_WIDE_INT offset,
2905 unsigned HOST_WIDE_INT size);
2907 /* See if we can find a constructor defining the value of BASE.
2908 When we know the constructor with a constant offset (such as when
2909 BASE is array[40] and we know the constructor of array), then
2910 BIT_OFFSET is adjusted accordingly.
2912 As a special case, return error_mark_node when the constructor
2913 is not explicitly available, but it is known to be zero,
2914 such as for 'static const int a;'. */
2915 static tree
2916 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2917 tree (*valueize)(tree))
2919 HOST_WIDE_INT bit_offset2, size, max_size;
2920 if (TREE_CODE (base) == MEM_REF)
2922 if (!integer_zerop (TREE_OPERAND (base, 1)))
2924 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2925 return NULL_TREE;
2926 *bit_offset += (mem_ref_offset (base).low
2927 * BITS_PER_UNIT);
2930 if (valueize
2931 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2932 base = valueize (TREE_OPERAND (base, 0));
2933 if (!base || TREE_CODE (base) != ADDR_EXPR)
2934 return NULL_TREE;
2935 base = TREE_OPERAND (base, 0);
2938 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2939 DECL_INITIAL. If BASE is a nested reference into another
2940 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2941 the inner reference. */
2942 switch (TREE_CODE (base))
2944 case VAR_DECL:
2945 if (!const_value_known_p (base))
2946 return NULL_TREE;
2948 /* Fallthru. */
2949 case CONST_DECL:
2950 if (!DECL_INITIAL (base)
2951 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2952 return error_mark_node;
2953 return DECL_INITIAL (base);
2955 case ARRAY_REF:
2956 case COMPONENT_REF:
2957 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2958 if (max_size == -1 || size != max_size)
2959 return NULL_TREE;
2960 *bit_offset += bit_offset2;
2961 return get_base_constructor (base, bit_offset, valueize);
2963 case STRING_CST:
2964 case CONSTRUCTOR:
2965 return base;
2967 default:
2968 return NULL_TREE;
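/* For example (illustrative, with made-up names): for
   BASE == MEM_REF[p_1, 8] where VALUEIZE maps p_1 to &arr and
   static const int arr[4] = {1, 2, 3, 4}, the MEM_REF case above
   adds 8 * BITS_PER_UNIT to *BIT_OFFSET, strips the address, and
   the VAR_DECL case returns arr's CONSTRUCTOR via DECL_INITIAL.  */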
2972 /* CTOR is a STRING_CST. Fold a reference of type TYPE and size SIZE
2973 to the memory at bit OFFSET.
2975 We only do the simple job of folding byte accesses. */
2977 static tree
2978 fold_string_cst_ctor_reference (tree type, tree ctor,
2979 unsigned HOST_WIDE_INT offset,
2980 unsigned HOST_WIDE_INT size)
2982 if (INTEGRAL_TYPE_P (type)
2983 && (TYPE_MODE (type)
2984 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2985 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2986 == MODE_INT)
2987 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2988 && size == BITS_PER_UNIT
2989 && !(offset % BITS_PER_UNIT))
2991 offset /= BITS_PER_UNIT;
2992 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2993 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2994 [offset]));
2995 /* Folding
2996 const char a[20]="hello";
2997 return a[10];
2999 might lead to an offset greater than the string length. In this
3000 case we know the value is either initialized to 0 or out of
3001 bounds; return 0 in both cases. */
3002 return build_zero_cst (type);
3004 return NULL_TREE;
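/* For example (illustrative): with static const char s[] = "hi",
   reading s[1] gives OFFSET == 8 and SIZE == 8, so the code above
   returns the character constant 'i'; reading s[10] is beyond
   TREE_STRING_LENGTH and yields 0.  */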
3007 /* CTOR is a CONSTRUCTOR of an array type. Fold a reference of type TYPE
3008 and size SIZE to the memory at bit OFFSET. */
3010 static tree
3011 fold_array_ctor_reference (tree type, tree ctor,
3012 unsigned HOST_WIDE_INT offset,
3013 unsigned HOST_WIDE_INT size)
3015 unsigned HOST_WIDE_INT cnt;
3016 tree cfield, cval;
3017 double_int low_bound, elt_size;
3018 double_int index, max_index;
3019 double_int access_index;
3020 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
3021 HOST_WIDE_INT inner_offset;
3023 /* Compute low bound and elt size. */
3024 if (domain_type && TYPE_MIN_VALUE (domain_type))
3026 /* Static constructors for variably sized objects make no sense. */
3027 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
3028 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
3030 else
3031 low_bound = double_int_zero;
3032 /* Static constructors for variably sized objects make no sense. */
3033 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
3034 == INTEGER_CST);
3035 elt_size =
3036 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
3039 /* We can handle only constant-sized accesses that are known not to
3040 be larger than the size of an array element. */
3041 if (!TYPE_SIZE_UNIT (type)
3042 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
3043 || double_int_cmp (elt_size,
3044 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
3045 return NULL_TREE;
3047 /* Compute the array index we look for. */
3048 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
3049 elt_size, TRUNC_DIV_EXPR);
3050 access_index = double_int_add (access_index, low_bound);
3052 /* And offset within the access. */
3053 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
3055 /* See if the array field is large enough to span the whole access. We
3056 do not care to fold accesses spanning multiple array indexes. */
3057 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
3058 return NULL_TREE;
3060 index = double_int_sub (low_bound, double_int_one);
3061 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
3063 /* An array constructor may explicitly set the index, specify a range,
3064 or leave the index NULL, meaning that it is the next index after the
3065 previous one. */
3066 if (cfield)
3068 if (TREE_CODE (cfield) == INTEGER_CST)
3069 max_index = index = tree_to_double_int (cfield);
3070 else
3072 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
3073 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
3074 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
3077 else
3078 max_index = index = double_int_add (index, double_int_one);
3080 /* Do we have a match? */
3081 if (double_int_cmp (access_index, index, 1) >= 0
3082 && double_int_cmp (access_index, max_index, 1) <= 0)
3083 return fold_ctor_reference (type, cval, inner_offset, size);
3085 /* When memory is not explicitly mentioned in the constructor,
3086 it is 0 (or out of range). */
3087 return build_zero_cst (type);
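/* For example (illustrative): for
   static const int a[4] = {10, 20, 30, 40} and a 32-bit read of
   a[2], OFFSET is 64 and ELT_SIZE is 4, so ACCESS_INDEX is 2 and
   INNER_OFFSET is 0; the loop matches the third element and
   fold_ctor_reference returns 30.  */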
3090 /* CTOR is a CONSTRUCTOR of an aggregate or vector. Fold a reference
3091 of type TYPE and size SIZE to the memory at bit OFFSET. */
3093 static tree
3094 fold_nonarray_ctor_reference (tree type, tree ctor,
3095 unsigned HOST_WIDE_INT offset,
3096 unsigned HOST_WIDE_INT size)
3098 unsigned HOST_WIDE_INT cnt;
3099 tree cfield, cval;
3101 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
3102 cval)
3104 tree byte_offset = DECL_FIELD_OFFSET (cfield);
3105 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
3106 tree field_size = DECL_SIZE (cfield);
3107 double_int bitoffset;
3108 double_int byte_offset_cst = tree_to_double_int (byte_offset);
3109 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
3110 double_int bitoffset_end;
3112 /* Variable-sized objects in static constructors make no sense,
3113 but field_size can be NULL for flexible array members. */
3114 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
3115 && TREE_CODE (byte_offset) == INTEGER_CST
3116 && (field_size != NULL_TREE
3117 ? TREE_CODE (field_size) == INTEGER_CST
3118 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
3120 /* Compute bit offset of the field. */
3121 bitoffset = double_int_add (tree_to_double_int (field_offset),
3122 double_int_mul (byte_offset_cst,
3123 bits_per_unit_cst));
3124 /* Compute bit offset where the field ends. */
3125 if (field_size != NULL_TREE)
3126 bitoffset_end = double_int_add (bitoffset,
3127 tree_to_double_int (field_size));
3128 else
3129 bitoffset_end = double_int_zero;
3131 /* Is OFFSET in the range (BITOFFSET, BITOFFSET_END)? */
3132 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) >= 0
3133 && (field_size == NULL_TREE
3134 || double_int_cmp (uhwi_to_double_int (offset),
3135 bitoffset_end, 0) < 0))
3137 double_int access_end = double_int_add (uhwi_to_double_int (offset),
3138 uhwi_to_double_int (size));
3139 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
3140 bitoffset);
3141 /* We do have overlap. Now see if the field is large enough to
3142 cover the access. Give up for accesses spanning multiple
3143 fields. */
3144 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
3145 return NULL_TREE;
3146 return fold_ctor_reference (type, cval,
3147 double_int_to_uhwi (inner_offset), size);
3150 /* When memory is not explicitly mentioned in the constructor, it is 0. */
3151 return build_zero_cst (type);
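/* For example (illustrative): for
   static const struct { int a; int b; } s = {1, 2} and a 32-bit
   read of s.b, the field b has BITOFFSET 32 and BITOFFSET_END 64;
   OFFSET 32 falls in that range with INNER_OFFSET 0, and
   fold_ctor_reference returns 2.  */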
3154 /* CTOR is a value initializing memory; fold a reference of type TYPE
3155 and size SIZE to the memory at bit OFFSET. */
3157 static tree
3158 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
3159 unsigned HOST_WIDE_INT size)
3161 tree ret;
3163 /* We found a field with an exact match. */
3164 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
3165 && !offset)
3166 return canonicalize_constructor_val (ctor);
3168 /* We are at the end of the walk; see if we can view-convert the
3169 result. */
3170 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
3171 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3172 && operand_equal_p (TYPE_SIZE (type),
3173 TYPE_SIZE (TREE_TYPE (ctor)), 0))
3175 ret = canonicalize_constructor_val (ctor);
3176 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
3177 if (ret)
3178 STRIP_NOPS (ret);
3179 return ret;
3181 if (TREE_CODE (ctor) == STRING_CST)
3182 return fold_string_cst_ctor_reference (type, ctor, offset, size);
3183 if (TREE_CODE (ctor) == CONSTRUCTOR)
3186 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
3187 return fold_array_ctor_reference (type, ctor, offset, size);
3188 else
3189 return fold_nonarray_ctor_reference (type, ctor, offset, size);
3192 return NULL_TREE;
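/* For example (illustrative): reading a float from
   static const int i = 0x3f800000 at offset 0 takes the
   VIEW_CONVERT_EXPR path above; since the sizes match, fold_unary
   reinterprets the bits and (on IEEE targets) produces 1.0f.  */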
3195 /* Return the tree representing the element referenced by T if T is an
3196 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
3197 names using VALUEIZE. Return NULL_TREE otherwise. */
3199 tree
3200 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
3202 tree ctor, idx, base;
3203 HOST_WIDE_INT offset, size, max_size;
3204 tree tem;
3206 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3207 return get_symbol_constant_value (t);
3209 tem = fold_read_from_constant_string (t);
3210 if (tem)
3211 return tem;
3213 switch (TREE_CODE (t))
3215 case ARRAY_REF:
3216 case ARRAY_RANGE_REF:
3217 /* Constant indexes are handled well by get_base_constructor.
3218 We only special-case variable offsets here.
3219 FIXME: This code can't handle nested references with variable indexes
3220 (they will be handled only by iteration of ccp). Perhaps we can bring
3221 get_ref_base_and_extent here and make it use a valueize callback. */
3222 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3223 && valueize
3224 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3225 && host_integerp (idx, 0))
3227 tree low_bound, unit_size;
3229 /* If the resulting bit-offset is constant, track it. */
3230 if ((low_bound = array_ref_low_bound (t),
3231 host_integerp (low_bound, 0))
3232 && (unit_size = array_ref_element_size (t),
3233 host_integerp (unit_size, 1)))
3235 offset = TREE_INT_CST_LOW (idx);
3236 offset -= TREE_INT_CST_LOW (low_bound);
3237 offset *= TREE_INT_CST_LOW (unit_size);
3238 offset *= BITS_PER_UNIT;
3240 base = TREE_OPERAND (t, 0);
3241 ctor = get_base_constructor (base, &offset, valueize);
3242 /* Empty constructor. Always fold to 0. */
3243 if (ctor == error_mark_node)
3244 return build_zero_cst (TREE_TYPE (t));
3245 /* Out-of-bounds array access. The value is undefined,
3246 but don't fold. */
3247 if (offset < 0)
3248 return NULL_TREE;
3249 /* We cannot determine the ctor. */
3250 if (!ctor)
3251 return NULL_TREE;
3252 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3253 TREE_INT_CST_LOW (unit_size)
3254 * BITS_PER_UNIT);
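/* For example (illustrative, with made-up SSA names): for
   t == a[i_3] with VALUEIZE mapping i_3 to 2,
   static const int a[4] = {1, 2, 3, 4}, LOW_BOUND 0 and
   UNIT_SIZE 4, OFFSET becomes 2 * 4 * BITS_PER_UNIT and
   fold_ctor_reference extracts the constant 3.  */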
3257 /* Fallthru. */
3259 case COMPONENT_REF:
3260 case BIT_FIELD_REF:
3261 case TARGET_MEM_REF:
3262 case MEM_REF:
3263 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3264 ctor = get_base_constructor (base, &offset, valueize);
3266 /* Empty constructor. Always fold to 0. */
3267 if (ctor == error_mark_node)
3268 return build_zero_cst (TREE_TYPE (t));
3269 /* We do not know the precise address. */
3270 if (max_size == -1 || max_size != size)
3271 return NULL_TREE;
3272 /* We cannot determine the ctor. */
3273 if (!ctor)
3274 return NULL_TREE;
3276 /* Out-of-bounds array access. The value is undefined, but don't fold. */
3277 if (offset < 0)
3278 return NULL_TREE;
3280 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
3282 case REALPART_EXPR:
3283 case IMAGPART_EXPR:
3285 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3286 if (c && TREE_CODE (c) == COMPLEX_CST)
3287 return fold_build1_loc (EXPR_LOCATION (t),
3288 TREE_CODE (t), TREE_TYPE (t), c);
3289 break;
3292 default:
3293 break;
3296 return NULL_TREE;
3299 tree
3300 fold_const_aggregate_ref (tree t)
3302 return fold_const_aggregate_ref_1 (t, NULL);