2013-11-20 Jan-Benedict Glaw <jbglaw@lug-owl.de>
[official-gcc.git] / gcc / gimple-fold.c
blob5493c5f3ea0ca4c729f2b80c283746c8e723486e
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2013 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "stringpool.h"
27 #include "expr.h"
28 #include "stmt.h"
29 #include "stor-layout.h"
30 #include "flags.h"
31 #include "function.h"
32 #include "dumpfile.h"
33 #include "bitmap.h"
34 #include "gimple.h"
35 #include "gimplify.h"
36 #include "gimple-iterator.h"
37 #include "gimple-ssa.h"
38 #include "tree-ssanames.h"
39 #include "tree-into-ssa.h"
40 #include "tree-dfa.h"
41 #include "tree-ssa.h"
42 #include "tree-ssa-propagate.h"
43 #include "target.h"
44 #include "ipa-utils.h"
45 #include "gimple-pretty-print.h"
46 #include "tree-ssa-address.h"
47 #include "langhooks.h"
49 /* Return true when DECL can be referenced from current unit.
50 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
51 We can get declarations that are not possible to reference for various
52 reasons:
54 1) When analyzing C++ virtual tables.
55 C++ virtual tables do have known constructors even
56 when they are keyed to other compilation unit.
57 Those tables can contain pointers to methods and vars
58 in other units. Those methods have both STATIC and EXTERNAL
59 set.
60 2) In WHOPR mode devirtualization might lead to reference
61 to method that was partitioned elsehwere.
62 In this case we have static VAR_DECL or FUNCTION_DECL
63 that has no corresponding callgraph/varpool node
64 declaring the body.
65 3) COMDAT functions referred by external vtables that
66 we devirtualize only during final compilation stage.
67 At this time we already decided that we will not output
68 the function body and thus we can't reference the symbol
69 directly. */
71 static bool
72 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
74 struct varpool_node *vnode;
75 struct cgraph_node *node;
76 symtab_node *snode;
78 if (DECL_ABSTRACT (decl))
79 return false;
81 /* We are concerned only about static/external vars and functions. */
82 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
83 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
84 return true;
86 /* Static objects can be referred only if they was not optimized out yet. */
87 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
89 snode = symtab_get_node (decl);
90 if (!snode)
91 return false;
92 node = dyn_cast <cgraph_node> (snode);
93 return !node || !node->global.inlined_to;
96 /* We will later output the initializer, so we can refer to it.
97 So we are concerned only when DECL comes from initializer of
98 external var. */
99 if (!from_decl
100 || TREE_CODE (from_decl) != VAR_DECL
101 || !DECL_EXTERNAL (from_decl)
102 || (flag_ltrans
103 && symtab_get_node (from_decl)->in_other_partition))
104 return true;
105 /* We are folding reference from external vtable. The vtable may reffer
106 to a symbol keyed to other compilation unit. The other compilation
107 unit may be in separate DSO and the symbol may be hidden. */
108 if (DECL_VISIBILITY_SPECIFIED (decl)
109 && DECL_EXTERNAL (decl)
110 && (!(snode = symtab_get_node (decl)) || !snode->in_other_partition))
111 return false;
112 /* When function is public, we always can introduce new reference.
113 Exception are the COMDAT functions where introducing a direct
114 reference imply need to include function body in the curren tunit. */
115 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
116 return true;
117 /* We are not at ltrans stage; so don't worry about WHOPR.
118 Also when still gimplifying all referred comdat functions will be
119 produced.
121 As observed in PR20991 for already optimized out comdat virtual functions
122 it may be tempting to not necessarily give up because the copy will be
123 output elsewhere when corresponding vtable is output.
124 This is however not possible - ABI specify that COMDATs are output in
125 units where they are used and when the other unit was compiled with LTO
126 it is possible that vtable was kept public while the function itself
127 was privatized. */
128 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
129 return true;
131 /* OK we are seeing either COMDAT or static variable. In this case we must
132 check that the definition is still around so we can refer it. */
133 if (TREE_CODE (decl) == FUNCTION_DECL)
135 node = cgraph_get_node (decl);
136 /* Check that we still have function body and that we didn't took
137 the decision to eliminate offline copy of the function yet.
138 The second is important when devirtualization happens during final
139 compilation stage when making a new reference no longer makes callee
140 to be compiled. */
141 if (!node || !node->definition || node->global.inlined_to)
143 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
144 return false;
147 else if (TREE_CODE (decl) == VAR_DECL)
149 vnode = varpool_get_node (decl);
150 if (!vnode || !vnode->definition)
152 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
153 return false;
156 return true;
159 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
160 acceptable form for is_gimple_min_invariant.
161 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
163 tree
164 canonicalize_constructor_val (tree cval, tree from_decl)
166 tree orig_cval = cval;
167 STRIP_NOPS (cval);
168 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
169 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
171 tree ptr = TREE_OPERAND (cval, 0);
172 if (is_gimple_min_invariant (ptr))
173 cval = build1_loc (EXPR_LOCATION (cval),
174 ADDR_EXPR, TREE_TYPE (ptr),
175 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
176 ptr,
177 fold_convert (ptr_type_node,
178 TREE_OPERAND (cval, 1))));
180 if (TREE_CODE (cval) == ADDR_EXPR)
182 tree base = NULL_TREE;
183 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
185 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
186 if (base)
187 TREE_OPERAND (cval, 0) = base;
189 else
190 base = get_base_address (TREE_OPERAND (cval, 0));
191 if (!base)
192 return NULL_TREE;
194 if ((TREE_CODE (base) == VAR_DECL
195 || TREE_CODE (base) == FUNCTION_DECL)
196 && !can_refer_decl_in_current_unit_p (base, from_decl))
197 return NULL_TREE;
198 if (TREE_CODE (base) == VAR_DECL)
199 TREE_ADDRESSABLE (base) = 1;
200 else if (TREE_CODE (base) == FUNCTION_DECL)
202 /* Make sure we create a cgraph node for functions we'll reference.
203 They can be non-existent if the reference comes from an entry
204 of an external vtable for example. */
205 cgraph_get_create_node (base);
207 /* Fixup types in global initializers. */
208 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
209 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
211 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
212 cval = fold_convert (TREE_TYPE (orig_cval), cval);
213 return cval;
215 if (TREE_OVERFLOW_P (cval))
216 return drop_tree_overflow (cval);
217 return orig_cval;
220 /* If SYM is a constant variable with known value, return the value.
221 NULL_TREE is returned otherwise. */
223 tree
224 get_symbol_constant_value (tree sym)
226 tree val = ctor_for_folding (sym);
227 if (val != error_mark_node)
229 if (val)
231 val = canonicalize_constructor_val (unshare_expr (val), sym);
232 if (val && is_gimple_min_invariant (val))
233 return val;
234 else
235 return NULL_TREE;
237 /* Variables declared 'const' without an initializer
238 have zero as the initializer if they may not be
239 overridden at link or run time. */
240 if (!val
241 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
242 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
243 return build_zero_cst (TREE_TYPE (sym));
246 return NULL_TREE;
251 /* Subroutine of fold_stmt. We perform several simplifications of the
252 memory reference tree EXPR and make sure to re-gimplify them properly
253 after propagation of constant addresses. IS_LHS is true if the
254 reference is supposed to be an lvalue. */
256 static tree
257 maybe_fold_reference (tree expr, bool is_lhs)
259 tree *t = &expr;
260 tree result;
262 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
263 || TREE_CODE (expr) == REALPART_EXPR
264 || TREE_CODE (expr) == IMAGPART_EXPR)
265 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
266 return fold_unary_loc (EXPR_LOCATION (expr),
267 TREE_CODE (expr),
268 TREE_TYPE (expr),
269 TREE_OPERAND (expr, 0));
270 else if (TREE_CODE (expr) == BIT_FIELD_REF
271 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
272 return fold_ternary_loc (EXPR_LOCATION (expr),
273 TREE_CODE (expr),
274 TREE_TYPE (expr),
275 TREE_OPERAND (expr, 0),
276 TREE_OPERAND (expr, 1),
277 TREE_OPERAND (expr, 2));
279 while (handled_component_p (*t))
280 t = &TREE_OPERAND (*t, 0);
282 /* Canonicalize MEM_REFs invariant address operand. Do this first
283 to avoid feeding non-canonical MEM_REFs elsewhere. */
284 if (TREE_CODE (*t) == MEM_REF
285 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
287 bool volatile_p = TREE_THIS_VOLATILE (*t);
288 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
289 TREE_OPERAND (*t, 0),
290 TREE_OPERAND (*t, 1));
291 if (tem)
293 TREE_THIS_VOLATILE (tem) = volatile_p;
294 *t = tem;
295 tem = maybe_fold_reference (expr, is_lhs);
296 if (tem)
297 return tem;
298 return expr;
302 if (!is_lhs
303 && (result = fold_const_aggregate_ref (expr))
304 && is_gimple_min_invariant (result))
305 return result;
307 /* Fold back MEM_REFs to reference trees. */
308 if (TREE_CODE (*t) == MEM_REF
309 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
310 && integer_zerop (TREE_OPERAND (*t, 1))
311 && (TREE_THIS_VOLATILE (*t)
312 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
313 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
314 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
315 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
316 /* We have to look out here to not drop a required conversion
317 from the rhs to the lhs if is_lhs, but we don't have the
318 rhs here to verify that. Thus require strict type
319 compatibility. */
320 && types_compatible_p (TREE_TYPE (*t),
321 TREE_TYPE (TREE_OPERAND
322 (TREE_OPERAND (*t, 0), 0))))
324 tree tem;
325 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
326 tem = maybe_fold_reference (expr, is_lhs);
327 if (tem)
328 return tem;
329 return expr;
331 else if (TREE_CODE (*t) == TARGET_MEM_REF)
333 tree tem = maybe_fold_tmr (*t);
334 if (tem)
336 *t = tem;
337 tem = maybe_fold_reference (expr, is_lhs);
338 if (tem)
339 return tem;
340 return expr;
344 return NULL_TREE;
348 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
349 replacement rhs for the statement or NULL_TREE if no simplification
350 could be made. It is assumed that the operands have been previously
351 folded. */
353 static tree
354 fold_gimple_assign (gimple_stmt_iterator *si)
356 gimple stmt = gsi_stmt (*si);
357 enum tree_code subcode = gimple_assign_rhs_code (stmt);
358 location_t loc = gimple_location (stmt);
360 tree result = NULL_TREE;
362 switch (get_gimple_rhs_class (subcode))
364 case GIMPLE_SINGLE_RHS:
366 tree rhs = gimple_assign_rhs1 (stmt);
368 if (REFERENCE_CLASS_P (rhs))
369 return maybe_fold_reference (rhs, false);
371 else if (TREE_CODE (rhs) == ADDR_EXPR)
373 tree ref = TREE_OPERAND (rhs, 0);
374 tree tem = maybe_fold_reference (ref, true);
375 if (tem
376 && TREE_CODE (tem) == MEM_REF
377 && integer_zerop (TREE_OPERAND (tem, 1)))
378 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
379 else if (tem)
380 result = fold_convert (TREE_TYPE (rhs),
381 build_fold_addr_expr_loc (loc, tem));
382 else if (TREE_CODE (ref) == MEM_REF
383 && integer_zerop (TREE_OPERAND (ref, 1)))
384 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
387 else if (TREE_CODE (rhs) == CONSTRUCTOR
388 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
389 && (CONSTRUCTOR_NELTS (rhs)
390 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
392 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
393 unsigned i;
394 tree val;
396 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
397 if (TREE_CODE (val) != INTEGER_CST
398 && TREE_CODE (val) != REAL_CST
399 && TREE_CODE (val) != FIXED_CST)
400 return NULL_TREE;
402 return build_vector_from_ctor (TREE_TYPE (rhs),
403 CONSTRUCTOR_ELTS (rhs));
406 else if (DECL_P (rhs))
407 return get_symbol_constant_value (rhs);
409 /* If we couldn't fold the RHS, hand over to the generic
410 fold routines. */
411 if (result == NULL_TREE)
412 result = fold (rhs);
414 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
415 that may have been added by fold, and "useless" type
416 conversions that might now be apparent due to propagation. */
417 STRIP_USELESS_TYPE_CONVERSION (result);
419 if (result != rhs && valid_gimple_rhs_p (result))
420 return result;
422 return NULL_TREE;
424 break;
426 case GIMPLE_UNARY_RHS:
428 tree rhs = gimple_assign_rhs1 (stmt);
430 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
431 if (result)
433 /* If the operation was a conversion do _not_ mark a
434 resulting constant with TREE_OVERFLOW if the original
435 constant was not. These conversions have implementation
436 defined behavior and retaining the TREE_OVERFLOW flag
437 here would confuse later passes such as VRP. */
438 if (CONVERT_EXPR_CODE_P (subcode)
439 && TREE_CODE (result) == INTEGER_CST
440 && TREE_CODE (rhs) == INTEGER_CST)
441 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
443 STRIP_USELESS_TYPE_CONVERSION (result);
444 if (valid_gimple_rhs_p (result))
445 return result;
448 break;
450 case GIMPLE_BINARY_RHS:
451 /* Try to canonicalize for boolean-typed X the comparisons
452 X == 0, X == 1, X != 0, and X != 1. */
453 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
454 || gimple_assign_rhs_code (stmt) == NE_EXPR)
456 tree lhs = gimple_assign_lhs (stmt);
457 tree op1 = gimple_assign_rhs1 (stmt);
458 tree op2 = gimple_assign_rhs2 (stmt);
459 tree type = TREE_TYPE (op1);
461 /* Check whether the comparison operands are of the same boolean
462 type as the result type is.
463 Check that second operand is an integer-constant with value
464 one or zero. */
465 if (TREE_CODE (op2) == INTEGER_CST
466 && (integer_zerop (op2) || integer_onep (op2))
467 && useless_type_conversion_p (TREE_TYPE (lhs), type))
469 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
470 bool is_logical_not = false;
472 /* X == 0 and X != 1 is a logical-not.of X
473 X == 1 and X != 0 is X */
474 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
475 || (cmp_code == NE_EXPR && integer_onep (op2)))
476 is_logical_not = true;
478 if (is_logical_not == false)
479 result = op1;
480 /* Only for one-bit precision typed X the transformation
481 !X -> ~X is valied. */
482 else if (TYPE_PRECISION (type) == 1)
483 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
484 type, op1);
485 /* Otherwise we use !X -> X ^ 1. */
486 else
487 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
488 type, op1, build_int_cst (type, 1));
493 if (!result)
494 result = fold_binary_loc (loc, subcode,
495 TREE_TYPE (gimple_assign_lhs (stmt)),
496 gimple_assign_rhs1 (stmt),
497 gimple_assign_rhs2 (stmt));
499 if (result)
501 STRIP_USELESS_TYPE_CONVERSION (result);
502 if (valid_gimple_rhs_p (result))
503 return result;
505 break;
507 case GIMPLE_TERNARY_RHS:
508 /* Try to fold a conditional expression. */
509 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
511 tree op0 = gimple_assign_rhs1 (stmt);
512 tree tem;
513 bool set = false;
514 location_t cond_loc = gimple_location (stmt);
516 if (COMPARISON_CLASS_P (op0))
518 fold_defer_overflow_warnings ();
519 tem = fold_binary_loc (cond_loc,
520 TREE_CODE (op0), TREE_TYPE (op0),
521 TREE_OPERAND (op0, 0),
522 TREE_OPERAND (op0, 1));
523 /* This is actually a conditional expression, not a GIMPLE
524 conditional statement, however, the valid_gimple_rhs_p
525 test still applies. */
526 set = (tem && is_gimple_condexpr (tem)
527 && valid_gimple_rhs_p (tem));
528 fold_undefer_overflow_warnings (set, stmt, 0);
530 else if (is_gimple_min_invariant (op0))
532 tem = op0;
533 set = true;
535 else
536 return NULL_TREE;
538 if (set)
539 result = fold_build3_loc (cond_loc, COND_EXPR,
540 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
541 gimple_assign_rhs2 (stmt),
542 gimple_assign_rhs3 (stmt));
545 if (!result)
546 result = fold_ternary_loc (loc, subcode,
547 TREE_TYPE (gimple_assign_lhs (stmt)),
548 gimple_assign_rhs1 (stmt),
549 gimple_assign_rhs2 (stmt),
550 gimple_assign_rhs3 (stmt));
552 if (result)
554 STRIP_USELESS_TYPE_CONVERSION (result);
555 if (valid_gimple_rhs_p (result))
556 return result;
558 break;
560 case GIMPLE_INVALID_RHS:
561 gcc_unreachable ();
564 return NULL_TREE;
567 /* Attempt to fold a conditional statement. Return true if any changes were
568 made. We only attempt to fold the condition expression, and do not perform
569 any transformation that would require alteration of the cfg. It is
570 assumed that the operands have been previously folded. */
572 static bool
573 fold_gimple_cond (gimple stmt)
575 tree result = fold_binary_loc (gimple_location (stmt),
576 gimple_cond_code (stmt),
577 boolean_type_node,
578 gimple_cond_lhs (stmt),
579 gimple_cond_rhs (stmt));
581 if (result)
583 STRIP_USELESS_TYPE_CONVERSION (result);
584 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
586 gimple_cond_set_condition_from_tree (stmt, result);
587 return true;
591 return false;
594 /* Convert EXPR into a GIMPLE value suitable for substitution on the
595 RHS of an assignment. Insert the necessary statements before
596 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
597 is replaced. If the call is expected to produces a result, then it
598 is replaced by an assignment of the new RHS to the result variable.
599 If the result is to be ignored, then the call is replaced by a
600 GIMPLE_NOP. A proper VDEF chain is retained by making the first
601 VUSE and the last VDEF of the whole sequence be the same as the replaced
602 statement and using new SSA names for stores in between. */
604 void
605 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
607 tree lhs;
608 gimple stmt, new_stmt;
609 gimple_stmt_iterator i;
610 gimple_seq stmts = NULL;
611 struct gimplify_ctx gctx;
612 gimple laststore;
613 tree reaching_vuse;
615 stmt = gsi_stmt (*si_p);
617 gcc_assert (is_gimple_call (stmt));
619 push_gimplify_context (&gctx);
620 gctx.into_ssa = gimple_in_ssa_p (cfun);
622 lhs = gimple_call_lhs (stmt);
623 if (lhs == NULL_TREE)
625 gimplify_and_add (expr, &stmts);
626 /* We can end up with folding a memcpy of an empty class assignment
627 which gets optimized away by C++ gimplification. */
628 if (gimple_seq_empty_p (stmts))
630 pop_gimplify_context (NULL);
631 if (gimple_in_ssa_p (cfun))
633 unlink_stmt_vdef (stmt);
634 release_defs (stmt);
636 gsi_replace (si_p, gimple_build_nop (), true);
637 return;
640 else
642 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
643 new_stmt = gimple_build_assign (lhs, tmp);
644 i = gsi_last (stmts);
645 gsi_insert_after_without_update (&i, new_stmt,
646 GSI_CONTINUE_LINKING);
649 pop_gimplify_context (NULL);
651 if (gimple_has_location (stmt))
652 annotate_all_with_location (stmts, gimple_location (stmt));
654 /* First iterate over the replacement statements backward, assigning
655 virtual operands to their defining statements. */
656 laststore = NULL;
657 for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
659 new_stmt = gsi_stmt (i);
660 if ((gimple_assign_single_p (new_stmt)
661 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
662 || (is_gimple_call (new_stmt)
663 && (gimple_call_flags (new_stmt)
664 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
666 tree vdef;
667 if (!laststore)
668 vdef = gimple_vdef (stmt);
669 else
670 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
671 gimple_set_vdef (new_stmt, vdef);
672 if (vdef && TREE_CODE (vdef) == SSA_NAME)
673 SSA_NAME_DEF_STMT (vdef) = new_stmt;
674 laststore = new_stmt;
678 /* Second iterate over the statements forward, assigning virtual
679 operands to their uses. */
680 reaching_vuse = gimple_vuse (stmt);
681 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
683 new_stmt = gsi_stmt (i);
684 /* If the new statement possibly has a VUSE, update it with exact SSA
685 name we know will reach this one. */
686 if (gimple_has_mem_ops (new_stmt))
687 gimple_set_vuse (new_stmt, reaching_vuse);
688 gimple_set_modified (new_stmt, true);
689 if (gimple_vdef (new_stmt))
690 reaching_vuse = gimple_vdef (new_stmt);
693 /* If the new sequence does not do a store release the virtual
694 definition of the original statement. */
695 if (reaching_vuse
696 && reaching_vuse == gimple_vuse (stmt))
698 tree vdef = gimple_vdef (stmt);
699 if (vdef
700 && TREE_CODE (vdef) == SSA_NAME)
702 unlink_stmt_vdef (stmt);
703 release_ssa_name (vdef);
707 /* Finally replace the original statement with the sequence. */
708 gsi_replace_with_seq (si_p, stmts, false);
711 /* Return the string length, maximum string length or maximum value of
712 ARG in LENGTH.
713 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
714 is not NULL and, for TYPE == 0, its value is not equal to the length
715 we determine or if we are unable to determine the length or value,
716 return false. VISITED is a bitmap of visited variables.
717 TYPE is 0 if string length should be returned, 1 for maximum string
718 length and 2 for maximum value ARG can have. */
720 static bool
721 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
723 tree var, val;
724 gimple def_stmt;
726 if (TREE_CODE (arg) != SSA_NAME)
728 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
729 if (TREE_CODE (arg) == ADDR_EXPR
730 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
731 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
733 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
734 if (TREE_CODE (aop0) == INDIRECT_REF
735 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
736 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
737 length, visited, type);
740 if (type == 2)
742 val = arg;
743 if (TREE_CODE (val) != INTEGER_CST
744 || tree_int_cst_sgn (val) < 0)
745 return false;
747 else
748 val = c_strlen (arg, 1);
749 if (!val)
750 return false;
752 if (*length)
754 if (type > 0)
756 if (TREE_CODE (*length) != INTEGER_CST
757 || TREE_CODE (val) != INTEGER_CST)
758 return false;
760 if (tree_int_cst_lt (*length, val))
761 *length = val;
762 return true;
764 else if (simple_cst_equal (val, *length) != 1)
765 return false;
768 *length = val;
769 return true;
772 /* If ARG is registered for SSA update we cannot look at its defining
773 statement. */
774 if (name_registered_for_update_p (arg))
775 return false;
777 /* If we were already here, break the infinite cycle. */
778 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
779 return true;
781 var = arg;
782 def_stmt = SSA_NAME_DEF_STMT (var);
784 switch (gimple_code (def_stmt))
786 case GIMPLE_ASSIGN:
787 /* The RHS of the statement defining VAR must either have a
788 constant length or come from another SSA_NAME with a constant
789 length. */
790 if (gimple_assign_single_p (def_stmt)
791 || gimple_assign_unary_nop_p (def_stmt))
793 tree rhs = gimple_assign_rhs1 (def_stmt);
794 return get_maxval_strlen (rhs, length, visited, type);
796 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
798 tree op2 = gimple_assign_rhs2 (def_stmt);
799 tree op3 = gimple_assign_rhs3 (def_stmt);
800 return get_maxval_strlen (op2, length, visited, type)
801 && get_maxval_strlen (op3, length, visited, type);
803 return false;
805 case GIMPLE_PHI:
807 /* All the arguments of the PHI node must have the same constant
808 length. */
809 unsigned i;
811 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
813 tree arg = gimple_phi_arg (def_stmt, i)->def;
815 /* If this PHI has itself as an argument, we cannot
816 determine the string length of this argument. However,
817 if we can find a constant string length for the other
818 PHI args then we can still be sure that this is a
819 constant string length. So be optimistic and just
820 continue with the next argument. */
821 if (arg == gimple_phi_result (def_stmt))
822 continue;
824 if (!get_maxval_strlen (arg, length, visited, type))
825 return false;
828 return true;
830 default:
831 return false;
836 /* Fold builtin call in statement STMT. Returns a simplified tree.
837 We may return a non-constant expression, including another call
838 to a different function and with different arguments, e.g.,
839 substituting memcpy for strcpy when the string length is known.
840 Note that some builtins expand into inline code that may not
841 be valid in GIMPLE. Callers must take care. */
843 tree
844 gimple_fold_builtin (gimple stmt)
846 tree result, val[3];
847 tree callee, a;
848 int arg_idx, type;
849 bitmap visited;
850 bool ignore;
851 int nargs;
852 location_t loc = gimple_location (stmt);
854 gcc_assert (is_gimple_call (stmt));
856 ignore = (gimple_call_lhs (stmt) == NULL);
858 /* First try the generic builtin folder. If that succeeds, return the
859 result directly. */
860 result = fold_call_stmt (stmt, ignore);
861 if (result)
863 if (ignore)
864 STRIP_NOPS (result);
865 return result;
868 /* Ignore MD builtins. */
869 callee = gimple_call_fndecl (stmt);
870 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
871 return NULL_TREE;
873 /* Give up for always_inline inline builtins until they are
874 inlined. */
875 if (avoid_folding_inline_builtin (callee))
876 return NULL_TREE;
878 /* If the builtin could not be folded, and it has no argument list,
879 we're done. */
880 nargs = gimple_call_num_args (stmt);
881 if (nargs == 0)
882 return NULL_TREE;
884 /* Limit the work only for builtins we know how to simplify. */
885 switch (DECL_FUNCTION_CODE (callee))
887 case BUILT_IN_STRLEN:
888 case BUILT_IN_FPUTS:
889 case BUILT_IN_FPUTS_UNLOCKED:
890 arg_idx = 0;
891 type = 0;
892 break;
893 case BUILT_IN_STRCPY:
894 case BUILT_IN_STRNCPY:
895 arg_idx = 1;
896 type = 0;
897 break;
898 case BUILT_IN_MEMCPY_CHK:
899 case BUILT_IN_MEMPCPY_CHK:
900 case BUILT_IN_MEMMOVE_CHK:
901 case BUILT_IN_MEMSET_CHK:
902 case BUILT_IN_STRNCPY_CHK:
903 case BUILT_IN_STPNCPY_CHK:
904 arg_idx = 2;
905 type = 2;
906 break;
907 case BUILT_IN_STRCPY_CHK:
908 case BUILT_IN_STPCPY_CHK:
909 arg_idx = 1;
910 type = 1;
911 break;
912 case BUILT_IN_SNPRINTF_CHK:
913 case BUILT_IN_VSNPRINTF_CHK:
914 arg_idx = 1;
915 type = 2;
916 break;
917 default:
918 return NULL_TREE;
921 if (arg_idx >= nargs)
922 return NULL_TREE;
924 /* Try to use the dataflow information gathered by the CCP process. */
925 visited = BITMAP_ALLOC (NULL);
926 bitmap_clear (visited);
928 memset (val, 0, sizeof (val));
929 a = gimple_call_arg (stmt, arg_idx);
930 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
931 val[arg_idx] = NULL_TREE;
933 BITMAP_FREE (visited);
935 result = NULL_TREE;
936 switch (DECL_FUNCTION_CODE (callee))
938 case BUILT_IN_STRLEN:
939 if (val[0] && nargs == 1)
941 tree new_val =
942 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
944 /* If the result is not a valid gimple value, or not a cast
945 of a valid gimple value, then we cannot use the result. */
946 if (is_gimple_val (new_val)
947 || (CONVERT_EXPR_P (new_val)
948 && is_gimple_val (TREE_OPERAND (new_val, 0))))
949 return new_val;
951 break;
953 case BUILT_IN_STRCPY:
954 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
955 result = fold_builtin_strcpy (loc, callee,
956 gimple_call_arg (stmt, 0),
957 gimple_call_arg (stmt, 1),
958 val[1]);
959 break;
961 case BUILT_IN_STRNCPY:
962 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
963 result = fold_builtin_strncpy (loc, callee,
964 gimple_call_arg (stmt, 0),
965 gimple_call_arg (stmt, 1),
966 gimple_call_arg (stmt, 2),
967 val[1]);
968 break;
970 case BUILT_IN_FPUTS:
971 if (nargs == 2)
972 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
973 gimple_call_arg (stmt, 1),
974 ignore, false, val[0]);
975 break;
977 case BUILT_IN_FPUTS_UNLOCKED:
978 if (nargs == 2)
979 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
980 gimple_call_arg (stmt, 1),
981 ignore, true, val[0]);
982 break;
984 case BUILT_IN_MEMCPY_CHK:
985 case BUILT_IN_MEMPCPY_CHK:
986 case BUILT_IN_MEMMOVE_CHK:
987 case BUILT_IN_MEMSET_CHK:
988 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
989 result = fold_builtin_memory_chk (loc, callee,
990 gimple_call_arg (stmt, 0),
991 gimple_call_arg (stmt, 1),
992 gimple_call_arg (stmt, 2),
993 gimple_call_arg (stmt, 3),
994 val[2], ignore,
995 DECL_FUNCTION_CODE (callee));
996 break;
998 case BUILT_IN_STRCPY_CHK:
999 case BUILT_IN_STPCPY_CHK:
1000 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1001 result = fold_builtin_stxcpy_chk (loc, callee,
1002 gimple_call_arg (stmt, 0),
1003 gimple_call_arg (stmt, 1),
1004 gimple_call_arg (stmt, 2),
1005 val[1], ignore,
1006 DECL_FUNCTION_CODE (callee));
1007 break;
1009 case BUILT_IN_STRNCPY_CHK:
1010 case BUILT_IN_STPNCPY_CHK:
1011 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1012 result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
1013 gimple_call_arg (stmt, 1),
1014 gimple_call_arg (stmt, 2),
1015 gimple_call_arg (stmt, 3),
1016 val[2], ignore,
1017 DECL_FUNCTION_CODE (callee));
1018 break;
1020 case BUILT_IN_SNPRINTF_CHK:
1021 case BUILT_IN_VSNPRINTF_CHK:
1022 if (val[1] && is_gimple_val (val[1]))
1023 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1024 DECL_FUNCTION_CODE (callee));
1025 break;
1027 default:
1028 gcc_unreachable ();
1031 if (result && ignore)
1032 result = fold_ignored_result (result);
1033 return result;
/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors)
   EXPECTED_TYPE is type of the class virtual belongs to.  */

tree
gimple_extract_devirt_binfo_from_cst (tree cst, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, binfo;
  bool last_artificial = false;

  /* Devirtualization must be enabled and CST must be the address of a
     record-typed object.  */
  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  /* Require a declaration with a known, exact extent (max_size == -1 means
     the access size is unknown; max_size != size means it is variable).  */
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      /* Stop once we reached a sub-object of the expected class type.  */
      if (types_same_for_odr (type, expected_type))
	break;
      if (offset < 0)
	return NULL_TREE;

      /* Locate the field of TYPE covering bit OFFSET.  */
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL)
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_to_uhwi (DECL_SIZE (fld));
	  if (pos <= offset && (pos + size) > offset)
	    break;
	}
      /* Only descend into record-typed fields.  */
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      offset -= pos;
    }
  /* Artificial sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  /* A binfo with base binfos (ancestors) is not safe to use here.  */
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;
  else
    return binfo;
}
1105 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1106 The statement may be replaced by another statement, e.g., if the call
1107 simplifies to a constant value. Return true if any changes were made.
1108 It is assumed that the operands have been previously folded. */
1110 static bool
1111 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1113 gimple stmt = gsi_stmt (*gsi);
1114 tree callee;
1115 bool changed = false;
1116 unsigned i;
1118 /* Fold *& in call arguments. */
1119 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1120 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1122 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1123 if (tmp)
1125 gimple_call_set_arg (stmt, i, tmp);
1126 changed = true;
1130 /* Check for virtual calls that became direct calls. */
1131 callee = gimple_call_fn (stmt);
1132 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1134 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1136 if (dump_file && virtual_method_call_p (callee)
1137 && !possible_polymorphic_call_target_p
1138 (callee, cgraph_get_node (gimple_call_addr_fndecl
1139 (OBJ_TYPE_REF_EXPR (callee)))))
1141 fprintf (dump_file,
1142 "Type inheritnace inconsistent devirtualization of ");
1143 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1144 fprintf (dump_file, " to ");
1145 print_generic_expr (dump_file, callee, TDF_SLIM);
1146 fprintf (dump_file, "\n");
1149 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1150 changed = true;
1152 else if (virtual_method_call_p (callee))
1154 tree obj = OBJ_TYPE_REF_OBJECT (callee);
1155 tree binfo = gimple_extract_devirt_binfo_from_cst
1156 (obj, obj_type_ref_class (callee));
1157 if (binfo)
1159 HOST_WIDE_INT token
1160 = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1161 tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
1162 if (fndecl)
1164 #ifdef ENABLE_CHECKING
1165 gcc_assert (possible_polymorphic_call_target_p
1166 (callee, cgraph_get_node (fndecl)));
1168 #endif
1169 gimple_call_set_fndecl (stmt, fndecl);
1170 changed = true;
1176 if (inplace)
1177 return changed;
1179 /* Check for builtins that CCP can handle using information not
1180 available in the generic fold routines. */
1181 callee = gimple_call_fndecl (stmt);
1182 if (callee && DECL_BUILT_IN (callee))
1184 tree result = gimple_fold_builtin (stmt);
1185 if (result)
1187 if (!update_call_from_tree (gsi, result))
1188 gimplify_and_update_call_from_tree (gsi, result);
1189 changed = true;
1191 else if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
1192 changed |= targetm.gimple_fold_builtin (gsi);
1195 return changed;
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	/* Wrap a needed conversion so the RHS type matches the LHS.  */
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In-place folding may only shrink the statement, never grow it.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	/* Outputs: collect constraints (needed for parsing the input
	   constraints below) and fold references.  */
	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	/* Inputs: fold references, honoring mem/reg constraints.  */
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold &*ref inside an ADDR_EXPR bound value.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  /* The folding above (notably call folding) may have replaced the
     statement, so refetch it from the iterator.  */
  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* INPLACE == false: the worker is allowed to replace the statement.  */
  return fold_stmt_1 (gsi, false);
}
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true);
  /* In-place folding must not have replaced the statement.  */
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
1377 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1378 if EXPR is null or we don't know how.
1379 If non-null, the result always has boolean type. */
1381 static tree
1382 canonicalize_bool (tree expr, bool invert)
1384 if (!expr)
1385 return NULL_TREE;
1386 else if (invert)
1388 if (integer_nonzerop (expr))
1389 return boolean_false_node;
1390 else if (integer_zerop (expr))
1391 return boolean_true_node;
1392 else if (TREE_CODE (expr) == SSA_NAME)
1393 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1394 build_int_cst (TREE_TYPE (expr), 0));
1395 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1396 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1397 boolean_type_node,
1398 TREE_OPERAND (expr, 0),
1399 TREE_OPERAND (expr, 1));
1400 else
1401 return NULL_TREE;
1403 else
1405 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1406 return expr;
1407 if (integer_nonzerop (expr))
1408 return boolean_true_node;
1409 else if (integer_zerop (expr))
1410 return boolean_false_node;
1411 else if (TREE_CODE (expr) == SSA_NAME)
1412 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1413 build_int_cst (TREE_TYPE (expr), 0));
1414 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1415 return fold_build2 (TREE_CODE (expr),
1416 boolean_type_node,
1417 TREE_OPERAND (expr, 0),
1418 TREE_OPERAND (expr, 1));
1419 else
1420 return NULL_TREE;
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* A boolean NAME is equivalent to NAME != 0 and NAME == nonzero.  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      /* Or EXPR's defining assignment may compute exactly this
	 comparison.  */
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) or (name == nonzero) keeps the comparison's
	     sense ...  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* ... while (name == 0) or (name != nonzero) inverts it.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
1482 /* Check to see if two boolean expressions OP1 and OP2 are logically
1483 equivalent. */
1485 static bool
1486 same_bool_result_p (const_tree op1, const_tree op2)
1488 /* Simple cases first. */
1489 if (operand_equal_p (op1, op2, 0))
1490 return true;
1492 /* Check the cases where at least one of the operands is a comparison.
1493 These are a bit smarter than operand_equal_p in that they apply some
1494 identifies on SSA_NAMEs. */
1495 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1496 && same_bool_comparison_p (op1, TREE_CODE (op2),
1497 TREE_OPERAND (op2, 0),
1498 TREE_OPERAND (op2, 1)))
1499 return true;
1500 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1501 && same_bool_comparison_p (op2, TREE_CODE (op1),
1502 TREE_OPERAND (op1, 0),
1503 TREE_OPERAND (op1, 1)))
1504 return true;
1506 /* Default case. */
1507 return false;
1510 /* Forward declarations for some mutually recursive functions. */
1512 static tree
1513 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1514 enum tree_code code2, tree op2a, tree op2b);
1515 static tree
1516 and_var_with_comparison (tree var, bool invert,
1517 enum tree_code code2, tree op2a, tree op2b);
1518 static tree
1519 and_var_with_comparison_1 (gimple stmt,
1520 enum tree_code code2, tree op2a, tree op2b);
1521 static tree
1522 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1523 enum tree_code code2, tree op2a, tree op2b);
1524 static tree
1525 or_var_with_comparison (tree var, bool invert,
1526 enum tree_code code2, tree op2a, tree op2b);
1527 static tree
1528 or_var_with_comparison_1 (gimple stmt,
1529 enum tree_code code2, tree op2a, tree op2b);
1531 /* Helper function for and_comparisons_1: try to simplify the AND of the
1532 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1533 If INVERT is true, invert the value of the VAR before doing the AND.
1534 Return NULL_EXPR if we can't simplify this to a single expression. */
1536 static tree
1537 and_var_with_comparison (tree var, bool invert,
1538 enum tree_code code2, tree op2a, tree op2b)
1540 tree t;
1541 gimple stmt = SSA_NAME_DEF_STMT (var);
1543 /* We can only deal with variables whose definitions are assignments. */
1544 if (!is_gimple_assign (stmt))
1545 return NULL_TREE;
1547 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1548 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1549 Then we only have to consider the simpler non-inverted cases. */
1550 if (invert)
1551 t = or_var_with_comparison_1 (stmt,
1552 invert_tree_comparison (code2, false),
1553 op2a, op2b);
1554 else
1555 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1556 return canonicalize_bool (t, invert);
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) / (op2a == nonzero) tests op2a for truth ...  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* ... (op2a == 0) / (op2a != nonzero) tests op2a for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  /* When the != test subsumes the other, the other alone
	     remains.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise chose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges. */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME for
	 falsehood.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
1976 /* Try to simplify the AND of two comparisons, specified by
1977 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1978 If this can be simplified to a single expression (without requiring
1979 introducing more SSA variables to hold intermediate values),
1980 return the resulting tree. Otherwise return NULL_TREE.
1981 If the result expression is non-null, it has boolean type. */
1983 tree
1984 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1985 enum tree_code code2, tree op2a, tree op2b)
1987 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1988 if (t)
1989 return t;
1990 else
1991 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1994 /* Helper function for or_comparisons_1: try to simplify the OR of the
1995 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1996 If INVERT is true, invert the value of VAR before doing the OR.
1997 Return NULL_EXPR if we can't simplify this to a single expression. */
1999 static tree
2000 or_var_with_comparison (tree var, bool invert,
2001 enum tree_code code2, tree op2a, tree op2b)
2003 tree t;
2004 gimple stmt = SSA_NAME_DEF_STMT (var);
2006 /* We can only deal with variables whose definitions are assignments. */
2007 if (!is_gimple_assign (stmt))
2008 return NULL_TREE;
2010 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2011 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2012 Then we only have to consider the simpler non-inverted cases. */
2013 if (invert)
2014 t = and_var_with_comparison_1 (stmt,
2015 invert_tree_comparison (code2, false),
2016 op2a, op2b);
2017 else
2018 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2019 return canonicalize_bool (t, invert);
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) / (op2a == nonzero) tests op2a for truth ...  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* ... (op2a == 0) / (op2a != nonzero) tests op2a for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* cmp orders the two constants: negative, zero, or positive as
	 op1b is less than, equal to, or greater than op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME for false,
	 i.e. (NAME == 0) or (NAME != 1).  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
2440 /* Try to simplify the OR of two comparisons, specified by
2441 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2442 If this can be simplified to a single expression (without requiring
2443 introducing more SSA variables to hold intermediate values),
2444 return the resulting tree. Otherwise return NULL_TREE.
2445 If the result expression is non-null, it has boolean type. */
2447 tree
2448 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2449 enum tree_code code2, tree op2a, tree op2b)
2451 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2452 if (t)
2453 return t;
2454 else
2455 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ??? This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      /* A fully-initialized vector CONSTRUCTOR folds to a
		 VECTOR_CST when every valueized element is constant.  */
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, *vec;

		  vec = XALLOCAVEC (tree,
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec[i] = val;
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      /* Replace the SSA base of the MEM_REF with its known
			 invariant address so the aggregate folder below can
			 look into the pointed-to constant.  */
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for restrict qualification
		 of pointer types should not apply here.
		 Substitution later will only substitute to allowed places.  */
	      if (CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0))
		  && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
		     == TYPE_ADDR_SPACE (TREE_TYPE (op0))
		  && TYPE_MODE (TREE_TYPE (lhs))
		     == TYPE_MODE (TREE_TYPE (op0)))
		return op0;

	      return
		fold_unary_ignore_overflow_loc (loc, subcode,
						gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
		 further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr_loc
			 (loc,
			  fold_build2 (MEM_REF,
				       TREE_TYPE (TREE_TYPE (op0)),
				       unshare_expr (op0), off));
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

	      /* Fold embedded expressions in ternary codes.  */
	      if ((subcode == COND_EXPR
		   || subcode == VEC_COND_EXPR)
		  && COMPARISON_CLASS_P (op0))
		{
		  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
		  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
		  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
					      TREE_TYPE (op0), op00, op01);
		  if (tem)
		    op0 = tem;
		}

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;

	if (gimple_call_internal_p (stmt))
	  /* No folding yet for these functions.  */
	  return NULL_TREE;

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    call = build_call_array_loc (loc,
					 gimple_call_return_type (stmt),
					 fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
	    if (retval)
	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (retval);
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
2689 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2690 Returns NULL_TREE if folding to a constant is not possible, otherwise
2691 returns a constant according to is_gimple_min_invariant. */
2693 tree
2694 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2696 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2697 if (res && is_gimple_min_invariant (res))
2698 return res;
2699 return NULL_TREE;
2703 /* The following set of functions are supposed to fold references using
2704 their constant initializers. */
2706 static tree fold_ctor_reference (tree type, tree ctor,
2707 unsigned HOST_WIDE_INT offset,
2708 unsigned HOST_WIDE_INT size, tree);
/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold a constant MEM_REF offset into *BIT_OFFSET and strip the
	 address-taking so we can look at the underlying decl.  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).low
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      /* Give up on variable-sized or variable-positioned accesses.  */
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset +=  bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
2778 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2779 to the memory at bit OFFSET.
2781 We do only simple job of folding byte accesses. */
2783 static tree
2784 fold_string_cst_ctor_reference (tree type, tree ctor,
2785 unsigned HOST_WIDE_INT offset,
2786 unsigned HOST_WIDE_INT size)
2788 if (INTEGRAL_TYPE_P (type)
2789 && (TYPE_MODE (type)
2790 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2791 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2792 == MODE_INT)
2793 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2794 && size == BITS_PER_UNIT
2795 && !(offset % BITS_PER_UNIT))
2797 offset /= BITS_PER_UNIT;
2798 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2799 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2800 [offset]));
2801 /* Folding
2802 const char a[20]="hello";
2803 return a[10];
2805 might lead to offset greater than string length. In this case we
2806 know value is either initialized to 0 or out of bounds. Return 0
2807 in both cases. */
2808 return build_zero_cst (type);
2810 return NULL_TREE;
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  double_int low_bound, elt_size;
  double_int index, max_index;
  double_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = double_int_zero;
  /* Static constructors for variably sized objects makes no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size =
    tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type))))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
		 .udiv (elt_size, TRUNC_DIV_EXPR);
  access_index += low_bound;
  if (index_type)
    access_index = access_index.ext (TYPE_PRECISION (index_type),
				     TYPE_UNSIGNED (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  /* INDEX starts one below the first valid index so the increment at the
     top of the loop yields LOW_BOUND for an elt with no explicit index.  */
  index = low_bound - double_int_one;
  if (index_type)
    index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify range
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = tree_to_double_int (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = tree_to_double_int (TREE_OPERAND (cfield, 0));
	      max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += double_int_one;
	  if (index_type)
	    index = index.ext (TYPE_PRECISION (index_type),
			       TYPE_UNSIGNED (index_type));
	  max_index = index;
	}

      /* Do we have match?  */
      if (access_index.cmp (index, 1) >= 0
	  && access_index.cmp (max_index, 1) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      double_int bitoffset;
      double_int byte_offset_cst = tree_to_double_int (byte_offset);
      double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
      double_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = tree_to_double_int (field_offset)
		  + byte_offset_cst * bits_per_unit_cst;
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + tree_to_double_int (field_size);
      else
	bitoffset_end = double_int_zero;

      access_end = double_int::from_uhwi (offset)
		   + double_int::from_uhwi (size);

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (access_end.cmp (bitoffset, 0) > 0
	  && (field_size == NULL_TREE
	      || double_int::from_uhwi (offset).slt (bitoffset_end)))
	{
	  double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (access_end.cmp (bitoffset_end, 0) > 0)
	    return NULL_TREE;
	  if (double_int::from_uhwi (offset).slt (bitoffset))
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  */

static tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && operand_equal_p (TYPE_SIZE (type),
			  TYPE_SIZE (TREE_TYPE (ctor)), 0))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_NOPS (ret);
      return ret;
    }
  /* Dispatch on the kind of initializer: STRING_CSTs are handled
     byte-wise, CONSTRUCTORs per element/field.  */
  if (TREE_CODE (ctor) == STRING_CST)
    return fold_string_cst_ctor_reference (type, ctor, offset, size);
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;
	  double_int doffset;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size))
	      && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
			    .sext (TYPE_PRECISION (TREE_TYPE (idx))),
		  doffset.fits_shwi ()))
	    {
	      offset = doffset.to_shwi ();
	      offset *= TREE_INT_CST_LOW (unit_size);
	      offset *= BITS_PER_UNIT;

	      base = TREE_OPERAND (t, 0);
	      ctor = get_base_constructor (base, &offset, valueize);
	      /* Empty constructor.  Always fold to 0.  */
	      if (ctor == error_mark_node)
		return build_zero_cst (TREE_TYPE (t));
	      /* Out of bound array access.  Value is undefined,
		 but don't fold.  */
	      if (offset < 0)
		return NULL_TREE;
	      /* We can not determine ctor.  */
	      if (!ctor)
		return NULL_TREE;
	      return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					  TREE_INT_CST_LOW (unit_size)
					  * BITS_PER_UNIT,
					  base);
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_const_aggregate_ref_1, but without SSA name valueization;
   only references with constant parts can be folded.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
{
  unsigned HOST_WIDE_INT offset, size;
  tree v, fn, vtable, init;

  vtable = v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  /* The vtable reference may be offset into the table (e.g. for a
     secondary vtable in multiple inheritance).  */
  if (TREE_CODE (v) == POINTER_PLUS_EXPR)
    {
      offset = tree_to_uhwi (TREE_OPERAND (v, 1)) * BITS_PER_UNIT;
      v = TREE_OPERAND (v, 0);
    }
  else
    offset = 0;

  if (TREE_CODE (v) != ADDR_EXPR)
    return NULL_TREE;
  v = TREE_OPERAND (v, 0);

  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    return NULL_TREE;
  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  /* Index into the vtable by TOKEN slots of the element size.  */
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset += token * size;
  fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			    offset, size, v);
  if (!fn || integer_zerop (fn))
    return NULL_TREE;
  gcc_assert (TREE_CODE (fn) == ADDR_EXPR
	      || TREE_CODE (fn) == FDESC_EXPR);
  fn = TREE_OPERAND (fn, 0);
  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);

  /* When cgraph node is missing and function is not public, we cannot
     devirtualize.  This can happen in WHOPR when the actual method
     ends up in other partition, because we found devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (fn, vtable))
    return NULL_TREE;

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_get_create_node (fn);

  return fn;
}
3210 /* Return true iff VAL is a gimple expression that is known to be
3211 non-negative. Restricted to floating-point inputs. */
3213 bool
3214 gimple_val_nonnegative_real_p (tree val)
3216 gimple def_stmt;
3218 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3220 /* Use existing logic for non-gimple trees. */
3221 if (tree_expr_nonnegative_p (val))
3222 return true;
3224 if (TREE_CODE (val) != SSA_NAME)
3225 return false;
3227 /* Currently we look only at the immediately defining statement
3228 to make this determination, since recursion on defining
3229 statements of operands can lead to quadratic behavior in the
3230 worst case. This is expected to catch almost all occurrences
3231 in practice. It would be possible to implement limited-depth
3232 recursion if important cases are lost. Alternatively, passes
3233 that need this information (such as the pow/powi lowering code
3234 in the cse_sincos pass) could be revised to provide it through
3235 dataflow propagation. */
3237 def_stmt = SSA_NAME_DEF_STMT (val);
3239 if (is_gimple_assign (def_stmt))
3241 tree op0, op1;
3243 /* See fold-const.c:tree_expr_nonnegative_p for additional
3244 cases that could be handled with recursion. */
3246 switch (gimple_assign_rhs_code (def_stmt))
3248 case ABS_EXPR:
3249 /* Always true for floating-point operands. */
3250 return true;
3252 case MULT_EXPR:
3253 /* True if the two operands are identical (since we are
3254 restricted to floating-point inputs). */
3255 op0 = gimple_assign_rhs1 (def_stmt);
3256 op1 = gimple_assign_rhs2 (def_stmt);
3258 if (op0 == op1
3259 || operand_equal_p (op0, op1, 0))
3260 return true;
3262 default:
3263 return false;
3266 else if (is_gimple_call (def_stmt))
3268 tree fndecl = gimple_call_fndecl (def_stmt);
3269 if (fndecl
3270 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3272 tree arg1;
3274 switch (DECL_FUNCTION_CODE (fndecl))
3276 CASE_FLT_FN (BUILT_IN_ACOS):
3277 CASE_FLT_FN (BUILT_IN_ACOSH):
3278 CASE_FLT_FN (BUILT_IN_CABS):
3279 CASE_FLT_FN (BUILT_IN_COSH):
3280 CASE_FLT_FN (BUILT_IN_ERFC):
3281 CASE_FLT_FN (BUILT_IN_EXP):
3282 CASE_FLT_FN (BUILT_IN_EXP10):
3283 CASE_FLT_FN (BUILT_IN_EXP2):
3284 CASE_FLT_FN (BUILT_IN_FABS):
3285 CASE_FLT_FN (BUILT_IN_FDIM):
3286 CASE_FLT_FN (BUILT_IN_HYPOT):
3287 CASE_FLT_FN (BUILT_IN_POW10):
3288 return true;
3290 CASE_FLT_FN (BUILT_IN_SQRT):
3291 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3292 nonnegative inputs. */
3293 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3294 return true;
3296 break;
3298 CASE_FLT_FN (BUILT_IN_POWI):
3299 /* True if the second argument is an even integer. */
3300 arg1 = gimple_call_arg (def_stmt, 1);
3302 if (TREE_CODE (arg1) == INTEGER_CST
3303 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3304 return true;
3306 break;
3308 CASE_FLT_FN (BUILT_IN_POW):
3309 /* True if the second argument is an even integer-valued
3310 real. */
3311 arg1 = gimple_call_arg (def_stmt, 1);
3313 if (TREE_CODE (arg1) == REAL_CST)
3315 REAL_VALUE_TYPE c;
3316 HOST_WIDE_INT n;
3318 c = TREE_REAL_CST (arg1);
3319 n = real_to_integer (&c);
3321 if ((n & 1) == 0)
3323 REAL_VALUE_TYPE cint;
3324 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3325 if (real_identical (&c, &cint))
3326 return true;
3330 break;
3332 default:
3333 return false;
3338 return false;
3341 /* Given a pointer value OP0, return a simplified version of an
3342 indirection through OP0, or NULL_TREE if no simplification is
3343 possible. Note that the resulting type may be different from
3344 the type pointed to in the sense that it is still compatible
3345 from the langhooks point of view. */
3347 tree
3348 gimple_fold_indirect_ref (tree t)
3350 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
3351 tree sub = t;
3352 tree subtype;
3354 STRIP_NOPS (sub);
3355 subtype = TREE_TYPE (sub);
3356 if (!POINTER_TYPE_P (subtype))
3357 return NULL_TREE;
3359 if (TREE_CODE (sub) == ADDR_EXPR)
3361 tree op = TREE_OPERAND (sub, 0);
3362 tree optype = TREE_TYPE (op);
3363 /* *&p => p */
3364 if (useless_type_conversion_p (type, optype))
3365 return op;
3367 /* *(foo *)&fooarray => fooarray[0] */
3368 if (TREE_CODE (optype) == ARRAY_TYPE
3369 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
3370 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3372 tree type_domain = TYPE_DOMAIN (optype);
3373 tree min_val = size_zero_node;
3374 if (type_domain && TYPE_MIN_VALUE (type_domain))
3375 min_val = TYPE_MIN_VALUE (type_domain);
3376 if (TREE_CODE (min_val) == INTEGER_CST)
3377 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3379 /* *(foo *)&complexfoo => __real__ complexfoo */
3380 else if (TREE_CODE (optype) == COMPLEX_TYPE
3381 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3382 return fold_build1 (REALPART_EXPR, type, op);
3383 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
3384 else if (TREE_CODE (optype) == VECTOR_TYPE
3385 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3387 tree part_width = TYPE_SIZE (type);
3388 tree index = bitsize_int (0);
3389 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
3393 /* *(p + CST) -> ... */
3394 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
3395 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
3397 tree addr = TREE_OPERAND (sub, 0);
3398 tree off = TREE_OPERAND (sub, 1);
3399 tree addrtype;
3401 STRIP_NOPS (addr);
3402 addrtype = TREE_TYPE (addr);
3404 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
3405 if (TREE_CODE (addr) == ADDR_EXPR
3406 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
3407 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
3408 && tree_fits_uhwi_p (off))
3410 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
3411 tree part_width = TYPE_SIZE (type);
3412 unsigned HOST_WIDE_INT part_widthi
3413 = tree_to_shwi (part_width) / BITS_PER_UNIT;
3414 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
3415 tree index = bitsize_int (indexi);
3416 if (offset / part_widthi
3417 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
3418 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
3419 part_width, index);
3422 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
3423 if (TREE_CODE (addr) == ADDR_EXPR
3424 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
3425 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
3427 tree size = TYPE_SIZE_UNIT (type);
3428 if (tree_int_cst_equal (size, off))
3429 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
3432 /* *(p + CST) -> MEM_REF <p, CST>. */
3433 if (TREE_CODE (addr) != ADDR_EXPR
3434 || DECL_P (TREE_OPERAND (addr, 0)))
3435 return fold_build2 (MEM_REF, type,
3436 addr,
3437 build_int_cst_wide (ptype,
3438 TREE_INT_CST_LOW (off),
3439 TREE_INT_CST_HIGH (off)));
3442 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3443 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
3444 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
3445 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
3447 tree type_domain;
3448 tree min_val = size_zero_node;
3449 tree osub = sub;
3450 sub = gimple_fold_indirect_ref (sub);
3451 if (! sub)
3452 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
3453 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
3454 if (type_domain && TYPE_MIN_VALUE (type_domain))
3455 min_val = TYPE_MIN_VALUE (type_domain);
3456 if (TREE_CODE (min_val) == INTEGER_CST)
3457 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
3460 return NULL_TREE;