Daily bump.
[official-gcc.git] / gcc / gimple-fold.c
blob5dc27e172d9227e924a10281789ef015616d83cd
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2014 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "stringpool.h"
27 #include "expr.h"
28 #include "stmt.h"
29 #include "stor-layout.h"
30 #include "flags.h"
31 #include "function.h"
32 #include "dumpfile.h"
33 #include "bitmap.h"
34 #include "basic-block.h"
35 #include "tree-ssa-alias.h"
36 #include "internal-fn.h"
37 #include "gimple-fold.h"
38 #include "gimple-expr.h"
39 #include "is-a.h"
40 #include "gimple.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimple-ssa.h"
44 #include "tree-ssanames.h"
45 #include "tree-into-ssa.h"
46 #include "tree-dfa.h"
47 #include "tree-ssa.h"
48 #include "tree-ssa-propagate.h"
49 #include "target.h"
50 #include "ipa-utils.h"
51 #include "gimple-pretty-print.h"
52 #include "tree-ssa-address.h"
53 #include "langhooks.h"
54 #include "gimplify-me.h"
56 /* Return true when DECL can be referenced from current unit.
57 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
58 We can get declarations that are not possible to reference for various
59 reasons:
61 1) When analyzing C++ virtual tables.
62 C++ virtual tables do have known constructors even
63 when they are keyed to other compilation unit.
64 Those tables can contain pointers to methods and vars
65 in other units. Those methods have both STATIC and EXTERNAL
66 set.
67 2) In WHOPR mode devirtualization might lead to reference
68 to method that was partitioned elsehwere.
69 In this case we have static VAR_DECL or FUNCTION_DECL
70 that has no corresponding callgraph/varpool node
71 declaring the body.
72 3) COMDAT functions referred by external vtables that
73 we devirtualize only during final compilation stage.
74 At this time we already decided that we will not output
75 the function body and thus we can't reference the symbol
76 directly. */
78 static bool
79 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
81 varpool_node *vnode;
82 struct cgraph_node *node;
83 symtab_node *snode;
85 if (DECL_ABSTRACT (decl))
86 return false;
88 /* We are concerned only about static/external vars and functions. */
89 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
90 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
91 return true;
93 /* Static objects can be referred only if they was not optimized out yet. */
94 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
96 snode = symtab_get_node (decl);
97 if (!snode)
98 return false;
99 node = dyn_cast <cgraph_node> (snode);
100 return !node || !node->global.inlined_to;
103 /* We will later output the initializer, so we can refer to it.
104 So we are concerned only when DECL comes from initializer of
105 external var. */
106 if (!from_decl
107 || TREE_CODE (from_decl) != VAR_DECL
108 || !DECL_EXTERNAL (from_decl)
109 || (flag_ltrans
110 && symtab_get_node (from_decl)->in_other_partition))
111 return true;
112 /* We are folding reference from external vtable. The vtable may reffer
113 to a symbol keyed to other compilation unit. The other compilation
114 unit may be in separate DSO and the symbol may be hidden. */
115 if (DECL_VISIBILITY_SPECIFIED (decl)
116 && DECL_EXTERNAL (decl)
117 && (!(snode = symtab_get_node (decl)) || !snode->in_other_partition))
118 return false;
119 /* When function is public, we always can introduce new reference.
120 Exception are the COMDAT functions where introducing a direct
121 reference imply need to include function body in the curren tunit. */
122 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
123 return true;
124 /* We are not at ltrans stage; so don't worry about WHOPR.
125 Also when still gimplifying all referred comdat functions will be
126 produced.
128 As observed in PR20991 for already optimized out comdat virtual functions
129 it may be tempting to not necessarily give up because the copy will be
130 output elsewhere when corresponding vtable is output.
131 This is however not possible - ABI specify that COMDATs are output in
132 units where they are used and when the other unit was compiled with LTO
133 it is possible that vtable was kept public while the function itself
134 was privatized. */
135 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
136 return true;
138 /* OK we are seeing either COMDAT or static variable. In this case we must
139 check that the definition is still around so we can refer it. */
140 if (TREE_CODE (decl) == FUNCTION_DECL)
142 node = cgraph_get_node (decl);
143 /* Check that we still have function body and that we didn't took
144 the decision to eliminate offline copy of the function yet.
145 The second is important when devirtualization happens during final
146 compilation stage when making a new reference no longer makes callee
147 to be compiled. */
148 if (!node || !node->definition || node->global.inlined_to)
150 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
151 return false;
154 else if (TREE_CODE (decl) == VAR_DECL)
156 vnode = varpool_get_node (decl);
157 if (!vnode || !vnode->definition)
159 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
160 return false;
163 return true;
166 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
167 acceptable form for is_gimple_min_invariant.
168 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
170 tree
171 canonicalize_constructor_val (tree cval, tree from_decl)
173 tree orig_cval = cval;
174 STRIP_NOPS (cval);
175 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
176 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
178 tree ptr = TREE_OPERAND (cval, 0);
179 if (is_gimple_min_invariant (ptr))
180 cval = build1_loc (EXPR_LOCATION (cval),
181 ADDR_EXPR, TREE_TYPE (ptr),
182 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
183 ptr,
184 fold_convert (ptr_type_node,
185 TREE_OPERAND (cval, 1))));
187 if (TREE_CODE (cval) == ADDR_EXPR)
189 tree base = NULL_TREE;
190 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
192 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
193 if (base)
194 TREE_OPERAND (cval, 0) = base;
196 else
197 base = get_base_address (TREE_OPERAND (cval, 0));
198 if (!base)
199 return NULL_TREE;
201 if ((TREE_CODE (base) == VAR_DECL
202 || TREE_CODE (base) == FUNCTION_DECL)
203 && !can_refer_decl_in_current_unit_p (base, from_decl))
204 return NULL_TREE;
205 if (TREE_CODE (base) == VAR_DECL)
206 TREE_ADDRESSABLE (base) = 1;
207 else if (TREE_CODE (base) == FUNCTION_DECL)
209 /* Make sure we create a cgraph node for functions we'll reference.
210 They can be non-existent if the reference comes from an entry
211 of an external vtable for example. */
212 cgraph_get_create_node (base);
214 /* Fixup types in global initializers. */
215 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
216 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
218 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
219 cval = fold_convert (TREE_TYPE (orig_cval), cval);
220 return cval;
222 if (TREE_OVERFLOW_P (cval))
223 return drop_tree_overflow (cval);
224 return orig_cval;
227 /* If SYM is a constant variable with known value, return the value.
228 NULL_TREE is returned otherwise. */
230 tree
231 get_symbol_constant_value (tree sym)
233 tree val = ctor_for_folding (sym);
234 if (val != error_mark_node)
236 if (val)
238 val = canonicalize_constructor_val (unshare_expr (val), sym);
239 if (val && is_gimple_min_invariant (val))
240 return val;
241 else
242 return NULL_TREE;
244 /* Variables declared 'const' without an initializer
245 have zero as the initializer if they may not be
246 overridden at link or run time. */
247 if (!val
248 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
249 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
250 return build_zero_cst (TREE_TYPE (sym));
253 return NULL_TREE;
258 /* Subroutine of fold_stmt. We perform several simplifications of the
259 memory reference tree EXPR and make sure to re-gimplify them properly
260 after propagation of constant addresses. IS_LHS is true if the
261 reference is supposed to be an lvalue. */
263 static tree
264 maybe_fold_reference (tree expr, bool is_lhs)
266 tree *t = &expr;
267 tree result;
269 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
270 || TREE_CODE (expr) == REALPART_EXPR
271 || TREE_CODE (expr) == IMAGPART_EXPR)
272 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
273 return fold_unary_loc (EXPR_LOCATION (expr),
274 TREE_CODE (expr),
275 TREE_TYPE (expr),
276 TREE_OPERAND (expr, 0));
277 else if (TREE_CODE (expr) == BIT_FIELD_REF
278 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
279 return fold_ternary_loc (EXPR_LOCATION (expr),
280 TREE_CODE (expr),
281 TREE_TYPE (expr),
282 TREE_OPERAND (expr, 0),
283 TREE_OPERAND (expr, 1),
284 TREE_OPERAND (expr, 2));
286 while (handled_component_p (*t))
287 t = &TREE_OPERAND (*t, 0);
289 /* Canonicalize MEM_REFs invariant address operand. Do this first
290 to avoid feeding non-canonical MEM_REFs elsewhere. */
291 if (TREE_CODE (*t) == MEM_REF
292 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
294 bool volatile_p = TREE_THIS_VOLATILE (*t);
295 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
296 TREE_OPERAND (*t, 0),
297 TREE_OPERAND (*t, 1));
298 if (tem)
300 TREE_THIS_VOLATILE (tem) = volatile_p;
301 *t = tem;
302 tem = maybe_fold_reference (expr, is_lhs);
303 if (tem)
304 return tem;
305 return expr;
309 if (!is_lhs
310 && (result = fold_const_aggregate_ref (expr))
311 && is_gimple_min_invariant (result))
312 return result;
314 /* Fold back MEM_REFs to reference trees. */
315 if (TREE_CODE (*t) == MEM_REF
316 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
317 && integer_zerop (TREE_OPERAND (*t, 1))
318 && (TREE_THIS_VOLATILE (*t)
319 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
320 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
321 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
322 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
323 /* We have to look out here to not drop a required conversion
324 from the rhs to the lhs if is_lhs, but we don't have the
325 rhs here to verify that. Thus require strict type
326 compatibility. */
327 && types_compatible_p (TREE_TYPE (*t),
328 TREE_TYPE (TREE_OPERAND
329 (TREE_OPERAND (*t, 0), 0))))
331 tree tem;
332 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
333 tem = maybe_fold_reference (expr, is_lhs);
334 if (tem)
335 return tem;
336 return expr;
338 else if (TREE_CODE (*t) == TARGET_MEM_REF)
340 tree tem = maybe_fold_tmr (*t);
341 if (tem)
343 *t = tem;
344 tem = maybe_fold_reference (expr, is_lhs);
345 if (tem)
346 return tem;
347 return expr;
351 return NULL_TREE;
355 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
356 replacement rhs for the statement or NULL_TREE if no simplification
357 could be made. It is assumed that the operands have been previously
358 folded. */
360 static tree
361 fold_gimple_assign (gimple_stmt_iterator *si)
363 gimple stmt = gsi_stmt (*si);
364 enum tree_code subcode = gimple_assign_rhs_code (stmt);
365 location_t loc = gimple_location (stmt);
367 tree result = NULL_TREE;
369 switch (get_gimple_rhs_class (subcode))
371 case GIMPLE_SINGLE_RHS:
373 tree rhs = gimple_assign_rhs1 (stmt);
375 if (REFERENCE_CLASS_P (rhs))
376 return maybe_fold_reference (rhs, false);
378 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
380 tree val = OBJ_TYPE_REF_EXPR (rhs);
381 if (is_gimple_min_invariant (val))
382 return val;
383 else if (flag_devirtualize && virtual_method_call_p (val))
385 bool final;
386 vec <cgraph_node *>targets
387 = possible_polymorphic_call_targets (val, &final);
388 if (final && targets.length () <= 1)
390 tree fndecl;
391 if (targets.length () == 1)
392 fndecl = targets[0]->decl;
393 else
394 fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
395 val = fold_convert (TREE_TYPE (val), fndecl);
396 STRIP_USELESS_TYPE_CONVERSION (val);
397 return val;
402 else if (TREE_CODE (rhs) == ADDR_EXPR)
404 tree ref = TREE_OPERAND (rhs, 0);
405 tree tem = maybe_fold_reference (ref, true);
406 if (tem
407 && TREE_CODE (tem) == MEM_REF
408 && integer_zerop (TREE_OPERAND (tem, 1)))
409 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
410 else if (tem)
411 result = fold_convert (TREE_TYPE (rhs),
412 build_fold_addr_expr_loc (loc, tem));
413 else if (TREE_CODE (ref) == MEM_REF
414 && integer_zerop (TREE_OPERAND (ref, 1)))
415 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
418 else if (TREE_CODE (rhs) == CONSTRUCTOR
419 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
420 && (CONSTRUCTOR_NELTS (rhs)
421 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
423 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
424 unsigned i;
425 tree val;
427 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
428 if (TREE_CODE (val) != INTEGER_CST
429 && TREE_CODE (val) != REAL_CST
430 && TREE_CODE (val) != FIXED_CST)
431 return NULL_TREE;
433 return build_vector_from_ctor (TREE_TYPE (rhs),
434 CONSTRUCTOR_ELTS (rhs));
437 else if (DECL_P (rhs))
438 return get_symbol_constant_value (rhs);
440 /* If we couldn't fold the RHS, hand over to the generic
441 fold routines. */
442 if (result == NULL_TREE)
443 result = fold (rhs);
445 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
446 that may have been added by fold, and "useless" type
447 conversions that might now be apparent due to propagation. */
448 STRIP_USELESS_TYPE_CONVERSION (result);
450 if (result != rhs && valid_gimple_rhs_p (result))
451 return result;
453 return NULL_TREE;
455 break;
457 case GIMPLE_UNARY_RHS:
459 tree rhs = gimple_assign_rhs1 (stmt);
461 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
462 if (result)
464 /* If the operation was a conversion do _not_ mark a
465 resulting constant with TREE_OVERFLOW if the original
466 constant was not. These conversions have implementation
467 defined behavior and retaining the TREE_OVERFLOW flag
468 here would confuse later passes such as VRP. */
469 if (CONVERT_EXPR_CODE_P (subcode)
470 && TREE_CODE (result) == INTEGER_CST
471 && TREE_CODE (rhs) == INTEGER_CST)
472 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
474 STRIP_USELESS_TYPE_CONVERSION (result);
475 if (valid_gimple_rhs_p (result))
476 return result;
479 break;
481 case GIMPLE_BINARY_RHS:
482 /* Try to canonicalize for boolean-typed X the comparisons
483 X == 0, X == 1, X != 0, and X != 1. */
484 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
485 || gimple_assign_rhs_code (stmt) == NE_EXPR)
487 tree lhs = gimple_assign_lhs (stmt);
488 tree op1 = gimple_assign_rhs1 (stmt);
489 tree op2 = gimple_assign_rhs2 (stmt);
490 tree type = TREE_TYPE (op1);
492 /* Check whether the comparison operands are of the same boolean
493 type as the result type is.
494 Check that second operand is an integer-constant with value
495 one or zero. */
496 if (TREE_CODE (op2) == INTEGER_CST
497 && (integer_zerop (op2) || integer_onep (op2))
498 && useless_type_conversion_p (TREE_TYPE (lhs), type))
500 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
501 bool is_logical_not = false;
503 /* X == 0 and X != 1 is a logical-not.of X
504 X == 1 and X != 0 is X */
505 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
506 || (cmp_code == NE_EXPR && integer_onep (op2)))
507 is_logical_not = true;
509 if (is_logical_not == false)
510 result = op1;
511 /* Only for one-bit precision typed X the transformation
512 !X -> ~X is valied. */
513 else if (TYPE_PRECISION (type) == 1)
514 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
515 type, op1);
516 /* Otherwise we use !X -> X ^ 1. */
517 else
518 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
519 type, op1, build_int_cst (type, 1));
524 if (!result)
525 result = fold_binary_loc (loc, subcode,
526 TREE_TYPE (gimple_assign_lhs (stmt)),
527 gimple_assign_rhs1 (stmt),
528 gimple_assign_rhs2 (stmt));
530 if (result)
532 STRIP_USELESS_TYPE_CONVERSION (result);
533 if (valid_gimple_rhs_p (result))
534 return result;
536 break;
538 case GIMPLE_TERNARY_RHS:
539 /* Try to fold a conditional expression. */
540 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
542 tree op0 = gimple_assign_rhs1 (stmt);
543 tree tem;
544 bool set = false;
545 location_t cond_loc = gimple_location (stmt);
547 if (COMPARISON_CLASS_P (op0))
549 fold_defer_overflow_warnings ();
550 tem = fold_binary_loc (cond_loc,
551 TREE_CODE (op0), TREE_TYPE (op0),
552 TREE_OPERAND (op0, 0),
553 TREE_OPERAND (op0, 1));
554 /* This is actually a conditional expression, not a GIMPLE
555 conditional statement, however, the valid_gimple_rhs_p
556 test still applies. */
557 set = (tem && is_gimple_condexpr (tem)
558 && valid_gimple_rhs_p (tem));
559 fold_undefer_overflow_warnings (set, stmt, 0);
561 else if (is_gimple_min_invariant (op0))
563 tem = op0;
564 set = true;
566 else
567 return NULL_TREE;
569 if (set)
570 result = fold_build3_loc (cond_loc, COND_EXPR,
571 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
572 gimple_assign_rhs2 (stmt),
573 gimple_assign_rhs3 (stmt));
576 if (!result)
577 result = fold_ternary_loc (loc, subcode,
578 TREE_TYPE (gimple_assign_lhs (stmt)),
579 gimple_assign_rhs1 (stmt),
580 gimple_assign_rhs2 (stmt),
581 gimple_assign_rhs3 (stmt));
583 if (result)
585 STRIP_USELESS_TYPE_CONVERSION (result);
586 if (valid_gimple_rhs_p (result))
587 return result;
589 break;
591 case GIMPLE_INVALID_RHS:
592 gcc_unreachable ();
595 return NULL_TREE;
598 /* Attempt to fold a conditional statement. Return true if any changes were
599 made. We only attempt to fold the condition expression, and do not perform
600 any transformation that would require alteration of the cfg. It is
601 assumed that the operands have been previously folded. */
603 static bool
604 fold_gimple_cond (gimple stmt)
606 tree result = fold_binary_loc (gimple_location (stmt),
607 gimple_cond_code (stmt),
608 boolean_type_node,
609 gimple_cond_lhs (stmt),
610 gimple_cond_rhs (stmt));
612 if (result)
614 STRIP_USELESS_TYPE_CONVERSION (result);
615 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
617 gimple_cond_set_condition_from_tree (stmt, result);
618 return true;
622 return false;
625 /* Convert EXPR into a GIMPLE value suitable for substitution on the
626 RHS of an assignment. Insert the necessary statements before
627 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
628 is replaced. If the call is expected to produces a result, then it
629 is replaced by an assignment of the new RHS to the result variable.
630 If the result is to be ignored, then the call is replaced by a
631 GIMPLE_NOP. A proper VDEF chain is retained by making the first
632 VUSE and the last VDEF of the whole sequence be the same as the replaced
633 statement and using new SSA names for stores in between. */
635 void
636 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
638 tree lhs;
639 gimple stmt, new_stmt;
640 gimple_stmt_iterator i;
641 gimple_seq stmts = NULL;
642 gimple laststore;
643 tree reaching_vuse;
645 stmt = gsi_stmt (*si_p);
647 gcc_assert (is_gimple_call (stmt));
649 push_gimplify_context (gimple_in_ssa_p (cfun));
651 lhs = gimple_call_lhs (stmt);
652 if (lhs == NULL_TREE)
654 gimplify_and_add (expr, &stmts);
655 /* We can end up with folding a memcpy of an empty class assignment
656 which gets optimized away by C++ gimplification. */
657 if (gimple_seq_empty_p (stmts))
659 pop_gimplify_context (NULL);
660 if (gimple_in_ssa_p (cfun))
662 unlink_stmt_vdef (stmt);
663 release_defs (stmt);
665 gsi_replace (si_p, gimple_build_nop (), true);
666 return;
669 else
671 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
672 new_stmt = gimple_build_assign (lhs, tmp);
673 i = gsi_last (stmts);
674 gsi_insert_after_without_update (&i, new_stmt,
675 GSI_CONTINUE_LINKING);
678 pop_gimplify_context (NULL);
680 if (gimple_has_location (stmt))
681 annotate_all_with_location (stmts, gimple_location (stmt));
683 /* First iterate over the replacement statements backward, assigning
684 virtual operands to their defining statements. */
685 laststore = NULL;
686 for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
688 new_stmt = gsi_stmt (i);
689 if ((gimple_assign_single_p (new_stmt)
690 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
691 || (is_gimple_call (new_stmt)
692 && (gimple_call_flags (new_stmt)
693 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
695 tree vdef;
696 if (!laststore)
697 vdef = gimple_vdef (stmt);
698 else
699 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
700 gimple_set_vdef (new_stmt, vdef);
701 if (vdef && TREE_CODE (vdef) == SSA_NAME)
702 SSA_NAME_DEF_STMT (vdef) = new_stmt;
703 laststore = new_stmt;
707 /* Second iterate over the statements forward, assigning virtual
708 operands to their uses. */
709 reaching_vuse = gimple_vuse (stmt);
710 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
712 new_stmt = gsi_stmt (i);
713 /* If the new statement possibly has a VUSE, update it with exact SSA
714 name we know will reach this one. */
715 if (gimple_has_mem_ops (new_stmt))
716 gimple_set_vuse (new_stmt, reaching_vuse);
717 gimple_set_modified (new_stmt, true);
718 if (gimple_vdef (new_stmt))
719 reaching_vuse = gimple_vdef (new_stmt);
722 /* If the new sequence does not do a store release the virtual
723 definition of the original statement. */
724 if (reaching_vuse
725 && reaching_vuse == gimple_vuse (stmt))
727 tree vdef = gimple_vdef (stmt);
728 if (vdef
729 && TREE_CODE (vdef) == SSA_NAME)
731 unlink_stmt_vdef (stmt);
732 release_ssa_name (vdef);
736 /* Finally replace the original statement with the sequence. */
737 gsi_replace_with_seq (si_p, stmts, false);
740 /* Return the string length, maximum string length or maximum value of
741 ARG in LENGTH.
742 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
743 is not NULL and, for TYPE == 0, its value is not equal to the length
744 we determine or if we are unable to determine the length or value,
745 return false. VISITED is a bitmap of visited variables.
746 TYPE is 0 if string length should be returned, 1 for maximum string
747 length and 2 for maximum value ARG can have. */
749 static bool
750 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
752 tree var, val;
753 gimple def_stmt;
755 if (TREE_CODE (arg) != SSA_NAME)
757 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
758 if (TREE_CODE (arg) == ADDR_EXPR
759 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
760 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
762 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
763 if (TREE_CODE (aop0) == INDIRECT_REF
764 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
765 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
766 length, visited, type);
769 if (type == 2)
771 val = arg;
772 if (TREE_CODE (val) != INTEGER_CST
773 || tree_int_cst_sgn (val) < 0)
774 return false;
776 else
777 val = c_strlen (arg, 1);
778 if (!val)
779 return false;
781 if (*length)
783 if (type > 0)
785 if (TREE_CODE (*length) != INTEGER_CST
786 || TREE_CODE (val) != INTEGER_CST)
787 return false;
789 if (tree_int_cst_lt (*length, val))
790 *length = val;
791 return true;
793 else if (simple_cst_equal (val, *length) != 1)
794 return false;
797 *length = val;
798 return true;
801 /* If ARG is registered for SSA update we cannot look at its defining
802 statement. */
803 if (name_registered_for_update_p (arg))
804 return false;
806 /* If we were already here, break the infinite cycle. */
807 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
808 return true;
810 var = arg;
811 def_stmt = SSA_NAME_DEF_STMT (var);
813 switch (gimple_code (def_stmt))
815 case GIMPLE_ASSIGN:
816 /* The RHS of the statement defining VAR must either have a
817 constant length or come from another SSA_NAME with a constant
818 length. */
819 if (gimple_assign_single_p (def_stmt)
820 || gimple_assign_unary_nop_p (def_stmt))
822 tree rhs = gimple_assign_rhs1 (def_stmt);
823 return get_maxval_strlen (rhs, length, visited, type);
825 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
827 tree op2 = gimple_assign_rhs2 (def_stmt);
828 tree op3 = gimple_assign_rhs3 (def_stmt);
829 return get_maxval_strlen (op2, length, visited, type)
830 && get_maxval_strlen (op3, length, visited, type);
832 return false;
834 case GIMPLE_PHI:
836 /* All the arguments of the PHI node must have the same constant
837 length. */
838 unsigned i;
840 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
842 tree arg = gimple_phi_arg (def_stmt, i)->def;
844 /* If this PHI has itself as an argument, we cannot
845 determine the string length of this argument. However,
846 if we can find a constant string length for the other
847 PHI args then we can still be sure that this is a
848 constant string length. So be optimistic and just
849 continue with the next argument. */
850 if (arg == gimple_phi_result (def_stmt))
851 continue;
853 if (!get_maxval_strlen (arg, length, visited, type))
854 return false;
857 return true;
859 default:
860 return false;
865 /* Fold builtin call in statement STMT. Returns a simplified tree.
866 We may return a non-constant expression, including another call
867 to a different function and with different arguments, e.g.,
868 substituting memcpy for strcpy when the string length is known.
869 Note that some builtins expand into inline code that may not
870 be valid in GIMPLE. Callers must take care. */
872 tree
873 gimple_fold_builtin (gimple stmt)
875 tree result, val[3];
876 tree callee, a;
877 int arg_idx, type;
878 bitmap visited;
879 bool ignore;
880 int nargs;
881 location_t loc = gimple_location (stmt);
883 ignore = (gimple_call_lhs (stmt) == NULL);
885 /* First try the generic builtin folder. If that succeeds, return the
886 result directly. */
887 result = fold_call_stmt (stmt, ignore);
888 if (result)
890 if (ignore)
891 STRIP_NOPS (result);
892 else
893 result = fold_convert (gimple_call_return_type (stmt), result);
894 return result;
897 /* Ignore MD builtins. */
898 callee = gimple_call_fndecl (stmt);
899 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
900 return NULL_TREE;
902 /* Give up for always_inline inline builtins until they are
903 inlined. */
904 if (avoid_folding_inline_builtin (callee))
905 return NULL_TREE;
907 /* If the builtin could not be folded, and it has no argument list,
908 we're done. */
909 nargs = gimple_call_num_args (stmt);
910 if (nargs == 0)
911 return NULL_TREE;
913 /* Limit the work only for builtins we know how to simplify. */
914 switch (DECL_FUNCTION_CODE (callee))
916 case BUILT_IN_STRLEN:
917 case BUILT_IN_FPUTS:
918 case BUILT_IN_FPUTS_UNLOCKED:
919 arg_idx = 0;
920 type = 0;
921 break;
922 case BUILT_IN_STRCPY:
923 case BUILT_IN_STRNCPY:
924 arg_idx = 1;
925 type = 0;
926 break;
927 case BUILT_IN_MEMCPY_CHK:
928 case BUILT_IN_MEMPCPY_CHK:
929 case BUILT_IN_MEMMOVE_CHK:
930 case BUILT_IN_MEMSET_CHK:
931 case BUILT_IN_STRNCPY_CHK:
932 case BUILT_IN_STPNCPY_CHK:
933 arg_idx = 2;
934 type = 2;
935 break;
936 case BUILT_IN_STRCPY_CHK:
937 case BUILT_IN_STPCPY_CHK:
938 arg_idx = 1;
939 type = 1;
940 break;
941 case BUILT_IN_SNPRINTF_CHK:
942 case BUILT_IN_VSNPRINTF_CHK:
943 arg_idx = 1;
944 type = 2;
945 break;
946 default:
947 return NULL_TREE;
950 if (arg_idx >= nargs)
951 return NULL_TREE;
953 /* Try to use the dataflow information gathered by the CCP process. */
954 visited = BITMAP_ALLOC (NULL);
955 bitmap_clear (visited);
957 memset (val, 0, sizeof (val));
958 a = gimple_call_arg (stmt, arg_idx);
959 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
960 val[arg_idx] = NULL_TREE;
962 BITMAP_FREE (visited);
964 result = NULL_TREE;
965 switch (DECL_FUNCTION_CODE (callee))
967 case BUILT_IN_STRLEN:
968 if (val[0] && nargs == 1)
970 tree new_val =
971 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
973 /* If the result is not a valid gimple value, or not a cast
974 of a valid gimple value, then we cannot use the result. */
975 if (is_gimple_val (new_val)
976 || (CONVERT_EXPR_P (new_val)
977 && is_gimple_val (TREE_OPERAND (new_val, 0))))
978 return new_val;
980 break;
982 case BUILT_IN_STRCPY:
983 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
984 result = fold_builtin_strcpy (loc, callee,
985 gimple_call_arg (stmt, 0),
986 gimple_call_arg (stmt, 1),
987 val[1]);
988 break;
990 case BUILT_IN_STRNCPY:
991 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
992 result = fold_builtin_strncpy (loc, callee,
993 gimple_call_arg (stmt, 0),
994 gimple_call_arg (stmt, 1),
995 gimple_call_arg (stmt, 2),
996 val[1]);
997 break;
999 case BUILT_IN_FPUTS:
1000 if (nargs == 2)
1001 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1002 gimple_call_arg (stmt, 1),
1003 ignore, false, val[0]);
1004 break;
1006 case BUILT_IN_FPUTS_UNLOCKED:
1007 if (nargs == 2)
1008 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1009 gimple_call_arg (stmt, 1),
1010 ignore, true, val[0]);
1011 break;
1013 case BUILT_IN_MEMCPY_CHK:
1014 case BUILT_IN_MEMPCPY_CHK:
1015 case BUILT_IN_MEMMOVE_CHK:
1016 case BUILT_IN_MEMSET_CHK:
1017 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1018 result = fold_builtin_memory_chk (loc, callee,
1019 gimple_call_arg (stmt, 0),
1020 gimple_call_arg (stmt, 1),
1021 gimple_call_arg (stmt, 2),
1022 gimple_call_arg (stmt, 3),
1023 val[2], ignore,
1024 DECL_FUNCTION_CODE (callee));
1025 break;
1027 case BUILT_IN_STRCPY_CHK:
1028 case BUILT_IN_STPCPY_CHK:
1029 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1030 result = fold_builtin_stxcpy_chk (loc, callee,
1031 gimple_call_arg (stmt, 0),
1032 gimple_call_arg (stmt, 1),
1033 gimple_call_arg (stmt, 2),
1034 val[1], ignore,
1035 DECL_FUNCTION_CODE (callee));
1036 break;
1038 case BUILT_IN_STRNCPY_CHK:
1039 case BUILT_IN_STPNCPY_CHK:
1040 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1041 result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
1042 gimple_call_arg (stmt, 1),
1043 gimple_call_arg (stmt, 2),
1044 gimple_call_arg (stmt, 3),
1045 val[2], ignore,
1046 DECL_FUNCTION_CODE (callee));
1047 break;
1049 case BUILT_IN_SNPRINTF_CHK:
1050 case BUILT_IN_VSNPRINTF_CHK:
1051 if (val[1] && is_gimple_val (val[1]))
1052 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1053 DECL_FUNCTION_CODE (callee));
1054 break;
1056 default:
1057 gcc_unreachable ();
1060 if (result && ignore)
1061 result = fold_ignored_result (result);
1062 return result;
1066 /* Return a binfo to be used for devirtualization of calls based on an object
1067 represented by a declaration (i.e. a global or automatically allocated one)
1068 or NULL if it cannot be found or is not safe. CST is expected to be an
1069 ADDR_EXPR of such object or the function will return NULL. Currently it is
1070 safe to use such binfo only if it has no base binfo (i.e. no ancestors)
1071 EXPECTED_TYPE is type of the class virtual belongs to. */
1073 tree
1074 gimple_extract_devirt_binfo_from_cst (tree cst, tree expected_type)
1076 HOST_WIDE_INT offset, size, max_size;
1077 tree base, type, binfo;
1078 bool last_artificial = false;
1080 if (!flag_devirtualize
1081 || TREE_CODE (cst) != ADDR_EXPR
1082 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
1083 return NULL_TREE;
1085 cst = TREE_OPERAND (cst, 0);
1086 base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
1087 type = TREE_TYPE (base);
1088 if (!DECL_P (base)
1089 || max_size == -1
1090 || max_size != size
1091 || TREE_CODE (type) != RECORD_TYPE)
1092 return NULL_TREE;
1094 /* Find the sub-object the constant actually refers to and mark whether it is
1095 an artificial one (as opposed to a user-defined one). */
1096 while (true)
1098 HOST_WIDE_INT pos, size;
1099 tree fld;
1101 if (types_same_for_odr (type, expected_type))
1102 break;
1103 if (offset < 0)
1104 return NULL_TREE;
1106 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1108 if (TREE_CODE (fld) != FIELD_DECL)
1109 continue;
1111 pos = int_bit_position (fld);
1112 size = tree_to_uhwi (DECL_SIZE (fld));
1113 if (pos <= offset && (pos + size) > offset)
1114 break;
1116 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
1117 return NULL_TREE;
1119 last_artificial = DECL_ARTIFICIAL (fld);
1120 type = TREE_TYPE (fld);
1121 offset -= pos;
1123 /* Artificial sub-objects are ancestors, we do not want to use them for
1124 devirtualization, at least not here. */
1125 if (last_artificial)
1126 return NULL_TREE;
1127 binfo = TYPE_BINFO (type);
1128 if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
1129 return NULL_TREE;
1130 else
1131 return binfo;
1134 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1135 The statement may be replaced by another statement, e.g., if the call
1136 simplifies to a constant value. Return true if any changes were made.
1137 It is assumed that the operands have been previously folded. */
1139 static bool
1140 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1142 gimple stmt = gsi_stmt (*gsi);
1143 tree callee;
1144 bool changed = false;
1145 unsigned i;
1147 /* Fold *& in call arguments. */
1148 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1149 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1151 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1152 if (tmp)
1154 gimple_call_set_arg (stmt, i, tmp);
1155 changed = true;
1159 /* Check for virtual calls that became direct calls. */
1160 callee = gimple_call_fn (stmt);
1161 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1163 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1165 if (dump_file && virtual_method_call_p (callee)
1166 && !possible_polymorphic_call_target_p
1167 (callee, cgraph_get_node (gimple_call_addr_fndecl
1168 (OBJ_TYPE_REF_EXPR (callee)))))
1170 fprintf (dump_file,
1171 "Type inheritance inconsistent devirtualization of ");
1172 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1173 fprintf (dump_file, " to ");
1174 print_generic_expr (dump_file, callee, TDF_SLIM);
1175 fprintf (dump_file, "\n");
1178 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1179 changed = true;
1181 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
1183 bool final;
1184 vec <cgraph_node *>targets
1185 = possible_polymorphic_call_targets (callee, &final);
1186 if (final && targets.length () <= 1)
1188 tree lhs = gimple_call_lhs (stmt);
1189 if (targets.length () == 1)
1191 gimple_call_set_fndecl (stmt, targets[0]->decl);
1192 changed = true;
1193 /* If the call becomes noreturn, remove the lhs. */
1194 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
1196 if (TREE_CODE (lhs) == SSA_NAME)
1198 tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
1199 tree def = get_or_create_ssa_default_def (cfun, var);
1200 gimple new_stmt = gimple_build_assign (lhs, def);
1201 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1203 gimple_call_set_lhs (stmt, NULL_TREE);
1206 else
1208 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
1209 gimple new_stmt = gimple_build_call (fndecl, 0);
1210 gimple_set_location (new_stmt, gimple_location (stmt));
1211 if (lhs && TREE_CODE (lhs) == SSA_NAME)
1213 tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
1214 tree def = get_or_create_ssa_default_def (cfun, var);
1215 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1216 update_call_from_tree (gsi, def);
1218 else
1219 gsi_replace (gsi, new_stmt, true);
1220 return true;
1226 if (inplace)
1227 return changed;
1229 /* Check for builtins that CCP can handle using information not
1230 available in the generic fold routines. */
1231 if (gimple_call_builtin_p (stmt))
1233 tree result = gimple_fold_builtin (stmt);
1234 if (result)
1236 if (!update_call_from_tree (gsi, result))
1237 gimplify_and_update_call_from_tree (gsi, result);
1238 changed = true;
1240 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
1241 changed |= targetm.gimple_fold_builtin (gsi);
1244 return changed;
1247 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1248 distinguishes both cases. */
1250 static bool
1251 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1253 bool changed = false;
1254 gimple stmt = gsi_stmt (*gsi);
1255 unsigned i;
1257 /* Fold the main computation performed by the statement. */
1258 switch (gimple_code (stmt))
1260 case GIMPLE_ASSIGN:
1262 unsigned old_num_ops = gimple_num_ops (stmt);
1263 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1264 tree lhs = gimple_assign_lhs (stmt);
1265 tree new_rhs;
1266 /* First canonicalize operand order. This avoids building new
1267 trees if this is the only thing fold would later do. */
1268 if ((commutative_tree_code (subcode)
1269 || commutative_ternary_tree_code (subcode))
1270 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1271 gimple_assign_rhs2 (stmt), false))
1273 tree tem = gimple_assign_rhs1 (stmt);
1274 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1275 gimple_assign_set_rhs2 (stmt, tem);
1276 changed = true;
1278 new_rhs = fold_gimple_assign (gsi);
1279 if (new_rhs
1280 && !useless_type_conversion_p (TREE_TYPE (lhs),
1281 TREE_TYPE (new_rhs)))
1282 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1283 if (new_rhs
1284 && (!inplace
1285 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1287 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1288 changed = true;
1290 break;
1293 case GIMPLE_COND:
1294 changed |= fold_gimple_cond (stmt);
1295 break;
1297 case GIMPLE_CALL:
1298 changed |= gimple_fold_call (gsi, inplace);
1299 break;
1301 case GIMPLE_ASM:
1302 /* Fold *& in asm operands. */
1304 size_t noutputs;
1305 const char **oconstraints;
1306 const char *constraint;
1307 bool allows_mem, allows_reg;
1309 noutputs = gimple_asm_noutputs (stmt);
1310 oconstraints = XALLOCAVEC (const char *, noutputs);
1312 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1314 tree link = gimple_asm_output_op (stmt, i);
1315 tree op = TREE_VALUE (link);
1316 oconstraints[i]
1317 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1318 if (REFERENCE_CLASS_P (op)
1319 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1321 TREE_VALUE (link) = op;
1322 changed = true;
1325 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1327 tree link = gimple_asm_input_op (stmt, i);
1328 tree op = TREE_VALUE (link);
1329 constraint
1330 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1331 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1332 oconstraints, &allows_mem, &allows_reg);
1333 if (REFERENCE_CLASS_P (op)
1334 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1335 != NULL_TREE)
1337 TREE_VALUE (link) = op;
1338 changed = true;
1342 break;
1344 case GIMPLE_DEBUG:
1345 if (gimple_debug_bind_p (stmt))
1347 tree val = gimple_debug_bind_get_value (stmt);
1348 if (val
1349 && REFERENCE_CLASS_P (val))
1351 tree tem = maybe_fold_reference (val, false);
1352 if (tem)
1354 gimple_debug_bind_set_value (stmt, tem);
1355 changed = true;
1358 else if (val
1359 && TREE_CODE (val) == ADDR_EXPR)
1361 tree ref = TREE_OPERAND (val, 0);
1362 tree tem = maybe_fold_reference (ref, false);
1363 if (tem)
1365 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
1366 gimple_debug_bind_set_value (stmt, tem);
1367 changed = true;
1371 break;
1373 default:;
1376 stmt = gsi_stmt (*gsi);
1378 /* Fold *& on the lhs. */
1379 if (gimple_has_lhs (stmt))
1381 tree lhs = gimple_get_lhs (stmt);
1382 if (lhs && REFERENCE_CLASS_P (lhs))
1384 tree new_lhs = maybe_fold_reference (lhs, true);
1385 if (new_lhs)
1387 gimple_set_lhs (stmt, new_lhs);
1388 changed = true;
1393 return changed;
1396 /* Fold the statement pointed to by GSI. In some cases, this function may
1397 replace the whole statement with a new one. Returns true iff folding
1398 makes any changes.
1399 The statement pointed to by GSI should be in valid gimple form but may
1400 be in unfolded state as resulting from for example constant propagation
1401 which can produce *&x = 0. */
1403 bool
1404 fold_stmt (gimple_stmt_iterator *gsi)
1406 return fold_stmt_1 (gsi, false);
1409 /* Perform the minimal folding on statement *GSI. Only operations like
1410 *&x created by constant propagation are handled. The statement cannot
1411 be replaced with a new one. Return true if the statement was
1412 changed, false otherwise.
1413 The statement *GSI should be in valid gimple form but may
1414 be in unfolded state as resulting from for example constant propagation
1415 which can produce *&x = 0. */
1417 bool
1418 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1420 gimple stmt = gsi_stmt (*gsi);
1421 bool changed = fold_stmt_1 (gsi, true);
1422 gcc_assert (gsi_stmt (*gsi) == stmt);
1423 return changed;
1426 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1427 if EXPR is null or we don't know how.
1428 If non-null, the result always has boolean type. */
1430 static tree
1431 canonicalize_bool (tree expr, bool invert)
1433 if (!expr)
1434 return NULL_TREE;
1435 else if (invert)
1437 if (integer_nonzerop (expr))
1438 return boolean_false_node;
1439 else if (integer_zerop (expr))
1440 return boolean_true_node;
1441 else if (TREE_CODE (expr) == SSA_NAME)
1442 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1443 build_int_cst (TREE_TYPE (expr), 0));
1444 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1445 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1446 boolean_type_node,
1447 TREE_OPERAND (expr, 0),
1448 TREE_OPERAND (expr, 1));
1449 else
1450 return NULL_TREE;
1452 else
1454 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1455 return expr;
1456 if (integer_nonzerop (expr))
1457 return boolean_true_node;
1458 else if (integer_zerop (expr))
1459 return boolean_false_node;
1460 else if (TREE_CODE (expr) == SSA_NAME)
1461 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1462 build_int_cst (TREE_TYPE (expr), 0));
1463 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1464 return fold_build2 (TREE_CODE (expr),
1465 boolean_type_node,
1466 TREE_OPERAND (expr, 0),
1467 TREE_OPERAND (expr, 1));
1468 else
1469 return NULL_TREE;
1473 /* Check to see if a boolean expression EXPR is logically equivalent to the
1474 comparison (OP1 CODE OP2). Check for various identities involving
1475 SSA_NAMEs. */
1477 static bool
1478 same_bool_comparison_p (const_tree expr, enum tree_code code,
1479 const_tree op1, const_tree op2)
1481 gimple s;
1483 /* The obvious case. */
1484 if (TREE_CODE (expr) == code
1485 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1486 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1487 return true;
1489 /* Check for comparing (name, name != 0) and the case where expr
1490 is an SSA_NAME with a definition matching the comparison. */
1491 if (TREE_CODE (expr) == SSA_NAME
1492 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1494 if (operand_equal_p (expr, op1, 0))
1495 return ((code == NE_EXPR && integer_zerop (op2))
1496 || (code == EQ_EXPR && integer_nonzerop (op2)));
1497 s = SSA_NAME_DEF_STMT (expr);
1498 if (is_gimple_assign (s)
1499 && gimple_assign_rhs_code (s) == code
1500 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1501 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1502 return true;
1505 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1506 of name is a comparison, recurse. */
1507 if (TREE_CODE (op1) == SSA_NAME
1508 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1510 s = SSA_NAME_DEF_STMT (op1);
1511 if (is_gimple_assign (s)
1512 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1514 enum tree_code c = gimple_assign_rhs_code (s);
1515 if ((c == NE_EXPR && integer_zerop (op2))
1516 || (c == EQ_EXPR && integer_nonzerop (op2)))
1517 return same_bool_comparison_p (expr, c,
1518 gimple_assign_rhs1 (s),
1519 gimple_assign_rhs2 (s));
1520 if ((c == EQ_EXPR && integer_zerop (op2))
1521 || (c == NE_EXPR && integer_nonzerop (op2)))
1522 return same_bool_comparison_p (expr,
1523 invert_tree_comparison (c, false),
1524 gimple_assign_rhs1 (s),
1525 gimple_assign_rhs2 (s));
1528 return false;
1531 /* Check to see if two boolean expressions OP1 and OP2 are logically
1532 equivalent. */
1534 static bool
1535 same_bool_result_p (const_tree op1, const_tree op2)
1537 /* Simple cases first. */
1538 if (operand_equal_p (op1, op2, 0))
1539 return true;
1541 /* Check the cases where at least one of the operands is a comparison.
1542 These are a bit smarter than operand_equal_p in that they apply some
1543 identifies on SSA_NAMEs. */
1544 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1545 && same_bool_comparison_p (op1, TREE_CODE (op2),
1546 TREE_OPERAND (op2, 0),
1547 TREE_OPERAND (op2, 1)))
1548 return true;
1549 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1550 && same_bool_comparison_p (op2, TREE_CODE (op1),
1551 TREE_OPERAND (op1, 0),
1552 TREE_OPERAND (op1, 1)))
1553 return true;
1555 /* Default case. */
1556 return false;
1559 /* Forward declarations for some mutually recursive functions. */
1561 static tree
1562 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1563 enum tree_code code2, tree op2a, tree op2b);
1564 static tree
1565 and_var_with_comparison (tree var, bool invert,
1566 enum tree_code code2, tree op2a, tree op2b);
1567 static tree
1568 and_var_with_comparison_1 (gimple stmt,
1569 enum tree_code code2, tree op2a, tree op2b);
1570 static tree
1571 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1572 enum tree_code code2, tree op2a, tree op2b);
1573 static tree
1574 or_var_with_comparison (tree var, bool invert,
1575 enum tree_code code2, tree op2a, tree op2b);
1576 static tree
1577 or_var_with_comparison_1 (gimple stmt,
1578 enum tree_code code2, tree op2a, tree op2b);
1580 /* Helper function for and_comparisons_1: try to simplify the AND of the
1581 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1582 If INVERT is true, invert the value of the VAR before doing the AND.
1583 Return NULL_EXPR if we can't simplify this to a single expression. */
1585 static tree
1586 and_var_with_comparison (tree var, bool invert,
1587 enum tree_code code2, tree op2a, tree op2b)
1589 tree t;
1590 gimple stmt = SSA_NAME_DEF_STMT (var);
1592 /* We can only deal with variables whose definitions are assignments. */
1593 if (!is_gimple_assign (stmt))
1594 return NULL_TREE;
1596 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1597 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1598 Then we only have to consider the simpler non-inverted cases. */
1599 if (invert)
1600 t = or_var_with_comparison_1 (stmt,
1601 invert_tree_comparison (code2, false),
1602 op2a, op2b);
1603 else
1604 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1605 return canonicalize_bool (t, invert);
1608 /* Try to simplify the AND of the ssa variable defined by the assignment
1609 STMT with the comparison specified by (OP2A CODE2 OP2B).
1610 Return NULL_EXPR if we can't simplify this to a single expression. */
1612 static tree
1613 and_var_with_comparison_1 (gimple stmt,
1614 enum tree_code code2, tree op2a, tree op2b)
1616 tree var = gimple_assign_lhs (stmt);
1617 tree true_test_var = NULL_TREE;
1618 tree false_test_var = NULL_TREE;
1619 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1621 /* Check for identities like (var AND (var == 0)) => false. */
1622 if (TREE_CODE (op2a) == SSA_NAME
1623 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1625 if ((code2 == NE_EXPR && integer_zerop (op2b))
1626 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1628 true_test_var = op2a;
1629 if (var == true_test_var)
1630 return var;
1632 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1633 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1635 false_test_var = op2a;
1636 if (var == false_test_var)
1637 return boolean_false_node;
1641 /* If the definition is a comparison, recurse on it. */
1642 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1644 tree t = and_comparisons_1 (innercode,
1645 gimple_assign_rhs1 (stmt),
1646 gimple_assign_rhs2 (stmt),
1647 code2,
1648 op2a,
1649 op2b);
1650 if (t)
1651 return t;
1654 /* If the definition is an AND or OR expression, we may be able to
1655 simplify by reassociating. */
1656 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1657 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1659 tree inner1 = gimple_assign_rhs1 (stmt);
1660 tree inner2 = gimple_assign_rhs2 (stmt);
1661 gimple s;
1662 tree t;
1663 tree partial = NULL_TREE;
1664 bool is_and = (innercode == BIT_AND_EXPR);
1666 /* Check for boolean identities that don't require recursive examination
1667 of inner1/inner2:
1668 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1669 inner1 AND (inner1 OR inner2) => inner1
1670 !inner1 AND (inner1 AND inner2) => false
1671 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1672 Likewise for similar cases involving inner2. */
1673 if (inner1 == true_test_var)
1674 return (is_and ? var : inner1);
1675 else if (inner2 == true_test_var)
1676 return (is_and ? var : inner2);
1677 else if (inner1 == false_test_var)
1678 return (is_and
1679 ? boolean_false_node
1680 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1681 else if (inner2 == false_test_var)
1682 return (is_and
1683 ? boolean_false_node
1684 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1686 /* Next, redistribute/reassociate the AND across the inner tests.
1687 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1688 if (TREE_CODE (inner1) == SSA_NAME
1689 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1690 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1691 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1692 gimple_assign_rhs1 (s),
1693 gimple_assign_rhs2 (s),
1694 code2, op2a, op2b)))
1696 /* Handle the AND case, where we are reassociating:
1697 (inner1 AND inner2) AND (op2a code2 op2b)
1698 => (t AND inner2)
1699 If the partial result t is a constant, we win. Otherwise
1700 continue on to try reassociating with the other inner test. */
1701 if (is_and)
1703 if (integer_onep (t))
1704 return inner2;
1705 else if (integer_zerop (t))
1706 return boolean_false_node;
1709 /* Handle the OR case, where we are redistributing:
1710 (inner1 OR inner2) AND (op2a code2 op2b)
1711 => (t OR (inner2 AND (op2a code2 op2b))) */
1712 else if (integer_onep (t))
1713 return boolean_true_node;
1715 /* Save partial result for later. */
1716 partial = t;
1719 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1720 if (TREE_CODE (inner2) == SSA_NAME
1721 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1722 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1723 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1724 gimple_assign_rhs1 (s),
1725 gimple_assign_rhs2 (s),
1726 code2, op2a, op2b)))
1728 /* Handle the AND case, where we are reassociating:
1729 (inner1 AND inner2) AND (op2a code2 op2b)
1730 => (inner1 AND t) */
1731 if (is_and)
1733 if (integer_onep (t))
1734 return inner1;
1735 else if (integer_zerop (t))
1736 return boolean_false_node;
1737 /* If both are the same, we can apply the identity
1738 (x AND x) == x. */
1739 else if (partial && same_bool_result_p (t, partial))
1740 return t;
1743 /* Handle the OR case. where we are redistributing:
1744 (inner1 OR inner2) AND (op2a code2 op2b)
1745 => (t OR (inner1 AND (op2a code2 op2b)))
1746 => (t OR partial) */
1747 else
1749 if (integer_onep (t))
1750 return boolean_true_node;
1751 else if (partial)
1753 /* We already got a simplification for the other
1754 operand to the redistributed OR expression. The
1755 interesting case is when at least one is false.
1756 Or, if both are the same, we can apply the identity
1757 (x OR x) == x. */
1758 if (integer_zerop (partial))
1759 return t;
1760 else if (integer_zerop (t))
1761 return partial;
1762 else if (same_bool_result_p (t, partial))
1763 return t;
1768 return NULL_TREE;
1771 /* Try to simplify the AND of two comparisons defined by
1772 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1773 If this can be done without constructing an intermediate value,
1774 return the resulting tree; otherwise NULL_TREE is returned.
1775 This function is deliberately asymmetric as it recurses on SSA_DEFs
1776 in the first comparison but not the second. */
1778 static tree
1779 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1780 enum tree_code code2, tree op2a, tree op2b)
1782 tree truth_type = truth_type_for (TREE_TYPE (op1a));
1784 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1785 if (operand_equal_p (op1a, op2a, 0)
1786 && operand_equal_p (op1b, op2b, 0))
1788 /* Result will be either NULL_TREE, or a combined comparison. */
1789 tree t = combine_comparisons (UNKNOWN_LOCATION,
1790 TRUTH_ANDIF_EXPR, code1, code2,
1791 truth_type, op1a, op1b);
1792 if (t)
1793 return t;
1796 /* Likewise the swapped case of the above. */
1797 if (operand_equal_p (op1a, op2b, 0)
1798 && operand_equal_p (op1b, op2a, 0))
1800 /* Result will be either NULL_TREE, or a combined comparison. */
1801 tree t = combine_comparisons (UNKNOWN_LOCATION,
1802 TRUTH_ANDIF_EXPR, code1,
1803 swap_tree_comparison (code2),
1804 truth_type, op1a, op1b);
1805 if (t)
1806 return t;
1809 /* If both comparisons are of the same value against constants, we might
1810 be able to merge them. */
1811 if (operand_equal_p (op1a, op2a, 0)
1812 && TREE_CODE (op1b) == INTEGER_CST
1813 && TREE_CODE (op2b) == INTEGER_CST)
1815 int cmp = tree_int_cst_compare (op1b, op2b);
1817 /* If we have (op1a == op1b), we should either be able to
1818 return that or FALSE, depending on whether the constant op1b
1819 also satisfies the other comparison against op2b. */
1820 if (code1 == EQ_EXPR)
1822 bool done = true;
1823 bool val;
1824 switch (code2)
1826 case EQ_EXPR: val = (cmp == 0); break;
1827 case NE_EXPR: val = (cmp != 0); break;
1828 case LT_EXPR: val = (cmp < 0); break;
1829 case GT_EXPR: val = (cmp > 0); break;
1830 case LE_EXPR: val = (cmp <= 0); break;
1831 case GE_EXPR: val = (cmp >= 0); break;
1832 default: done = false;
1834 if (done)
1836 if (val)
1837 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1838 else
1839 return boolean_false_node;
1842 /* Likewise if the second comparison is an == comparison. */
1843 else if (code2 == EQ_EXPR)
1845 bool done = true;
1846 bool val;
1847 switch (code1)
1849 case EQ_EXPR: val = (cmp == 0); break;
1850 case NE_EXPR: val = (cmp != 0); break;
1851 case LT_EXPR: val = (cmp > 0); break;
1852 case GT_EXPR: val = (cmp < 0); break;
1853 case LE_EXPR: val = (cmp >= 0); break;
1854 case GE_EXPR: val = (cmp <= 0); break;
1855 default: done = false;
1857 if (done)
1859 if (val)
1860 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1861 else
1862 return boolean_false_node;
1866 /* Same business with inequality tests. */
1867 else if (code1 == NE_EXPR)
1869 bool val;
1870 switch (code2)
1872 case EQ_EXPR: val = (cmp != 0); break;
1873 case NE_EXPR: val = (cmp == 0); break;
1874 case LT_EXPR: val = (cmp >= 0); break;
1875 case GT_EXPR: val = (cmp <= 0); break;
1876 case LE_EXPR: val = (cmp > 0); break;
1877 case GE_EXPR: val = (cmp < 0); break;
1878 default:
1879 val = false;
1881 if (val)
1882 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1884 else if (code2 == NE_EXPR)
1886 bool val;
1887 switch (code1)
1889 case EQ_EXPR: val = (cmp == 0); break;
1890 case NE_EXPR: val = (cmp != 0); break;
1891 case LT_EXPR: val = (cmp <= 0); break;
1892 case GT_EXPR: val = (cmp >= 0); break;
1893 case LE_EXPR: val = (cmp < 0); break;
1894 case GE_EXPR: val = (cmp > 0); break;
1895 default:
1896 val = false;
1898 if (val)
1899 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1902 /* Chose the more restrictive of two < or <= comparisons. */
1903 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1904 && (code2 == LT_EXPR || code2 == LE_EXPR))
1906 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1907 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1908 else
1909 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1912 /* Likewise chose the more restrictive of two > or >= comparisons. */
1913 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1914 && (code2 == GT_EXPR || code2 == GE_EXPR))
1916 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1917 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1918 else
1919 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1922 /* Check for singleton ranges. */
1923 else if (cmp == 0
1924 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1925 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1926 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1928 /* Check for disjoint ranges. */
1929 else if (cmp <= 0
1930 && (code1 == LT_EXPR || code1 == LE_EXPR)
1931 && (code2 == GT_EXPR || code2 == GE_EXPR))
1932 return boolean_false_node;
1933 else if (cmp >= 0
1934 && (code1 == GT_EXPR || code1 == GE_EXPR)
1935 && (code2 == LT_EXPR || code2 == LE_EXPR))
1936 return boolean_false_node;
1939 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1940 NAME's definition is a truth value. See if there are any simplifications
1941 that can be done against the NAME's definition. */
1942 if (TREE_CODE (op1a) == SSA_NAME
1943 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1944 && (integer_zerop (op1b) || integer_onep (op1b)))
1946 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1947 || (code1 == NE_EXPR && integer_onep (op1b)));
1948 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1949 switch (gimple_code (stmt))
1951 case GIMPLE_ASSIGN:
1952 /* Try to simplify by copy-propagating the definition. */
1953 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1955 case GIMPLE_PHI:
1956 /* If every argument to the PHI produces the same result when
1957 ANDed with the second comparison, we win.
1958 Do not do this unless the type is bool since we need a bool
1959 result here anyway. */
1960 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1962 tree result = NULL_TREE;
1963 unsigned i;
1964 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1966 tree arg = gimple_phi_arg_def (stmt, i);
1968 /* If this PHI has itself as an argument, ignore it.
1969 If all the other args produce the same result,
1970 we're still OK. */
1971 if (arg == gimple_phi_result (stmt))
1972 continue;
1973 else if (TREE_CODE (arg) == INTEGER_CST)
1975 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1977 if (!result)
1978 result = boolean_false_node;
1979 else if (!integer_zerop (result))
1980 return NULL_TREE;
1982 else if (!result)
1983 result = fold_build2 (code2, boolean_type_node,
1984 op2a, op2b);
1985 else if (!same_bool_comparison_p (result,
1986 code2, op2a, op2b))
1987 return NULL_TREE;
1989 else if (TREE_CODE (arg) == SSA_NAME
1990 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1992 tree temp;
1993 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1994 /* In simple cases we can look through PHI nodes,
1995 but we have to be careful with loops.
1996 See PR49073. */
1997 if (! dom_info_available_p (CDI_DOMINATORS)
1998 || gimple_bb (def_stmt) == gimple_bb (stmt)
1999 || dominated_by_p (CDI_DOMINATORS,
2000 gimple_bb (def_stmt),
2001 gimple_bb (stmt)))
2002 return NULL_TREE;
2003 temp = and_var_with_comparison (arg, invert, code2,
2004 op2a, op2b);
2005 if (!temp)
2006 return NULL_TREE;
2007 else if (!result)
2008 result = temp;
2009 else if (!same_bool_result_p (result, temp))
2010 return NULL_TREE;
2012 else
2013 return NULL_TREE;
2015 return result;
2018 default:
2019 break;
2022 return NULL_TREE;
2025 /* Try to simplify the AND of two comparisons, specified by
2026 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2027 If this can be simplified to a single expression (without requiring
2028 introducing more SSA variables to hold intermediate values),
2029 return the resulting tree. Otherwise return NULL_TREE.
2030 If the result expression is non-null, it has boolean type. */
2032 tree
2033 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2034 enum tree_code code2, tree op2a, tree op2b)
2036 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2037 if (t)
2038 return t;
2039 else
2040 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2043 /* Helper function for or_comparisons_1: try to simplify the OR of the
2044 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2045 If INVERT is true, invert the value of VAR before doing the OR.
2046 Return NULL_EXPR if we can't simplify this to a single expression. */
2048 static tree
2049 or_var_with_comparison (tree var, bool invert,
2050 enum tree_code code2, tree op2a, tree op2b)
2052 tree t;
2053 gimple stmt = SSA_NAME_DEF_STMT (var);
2055 /* We can only deal with variables whose definitions are assignments. */
2056 if (!is_gimple_assign (stmt))
2057 return NULL_TREE;
2059 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2060 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2061 Then we only have to consider the simpler non-inverted cases. */
2062 if (invert)
2063 t = and_var_with_comparison_1 (stmt,
2064 invert_tree_comparison (code2, false),
2065 op2a, op2b);
2066 else
2067 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2068 return canonicalize_bool (t, invert);
2071 /* Try to simplify the OR of the ssa variable defined by the assignment
2072 STMT with the comparison specified by (OP2A CODE2 OP2B).
2073 Return NULL_EXPR if we can't simplify this to a single expression. */
2075 static tree
2076 or_var_with_comparison_1 (gimple stmt,
2077 enum tree_code code2, tree op2a, tree op2b)
/* VAR is the value defined by STMT; everything below simplifies
   (VAR OR (OP2A CODE2 OP2B)).  */
2079 tree var = gimple_assign_lhs (stmt);
2080 tree true_test_var = NULL_TREE;
2081 tree false_test_var = NULL_TREE;
2082 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2084 /* Check for identities like (var OR (var != 0)) => true . */
2085 if (TREE_CODE (op2a) == SSA_NAME
2086 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2088 if ((code2 == NE_EXPR && integer_zerop (op2b))
2089 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
/* (op2a != 0) and (op2a == 1) both test op2a directly.  */
2091 true_test_var = op2a;
2092 if (var == true_test_var)
2093 return var;
2095 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2096 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
/* (op2a == 0) and (op2a != 1) both test !op2a, and
   (var OR !var) is always true.  */
2098 false_test_var = op2a;
2099 if (var == false_test_var)
2100 return boolean_true_node;
2104 /* If the definition is a comparison, recurse on it. */
2105 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2107 tree t = or_comparisons_1 (innercode,
2108 gimple_assign_rhs1 (stmt),
2109 gimple_assign_rhs2 (stmt),
2110 code2,
2111 op2a,
2112 op2b);
2113 if (t)
2114 return t;
2117 /* If the definition is an AND or OR expression, we may be able to
2118 simplify by reassociating. */
2119 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2120 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2122 tree inner1 = gimple_assign_rhs1 (stmt);
2123 tree inner2 = gimple_assign_rhs2 (stmt);
2124 gimple s;
2125 tree t;
2126 tree partial = NULL_TREE;
2127 bool is_or = (innercode == BIT_IOR_EXPR);
2129 /* Check for boolean identities that don't require recursive examination
2130 of inner1/inner2:
2131 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2132 inner1 OR (inner1 AND inner2) => inner1
2133 !inner1 OR (inner1 OR inner2) => true
2134 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2136 if (inner1 == true_test_var)
2137 return (is_or ? var : inner1);
2138 else if (inner2 == true_test_var)
2139 return (is_or ? var : inner2);
2140 else if (inner1 == false_test_var)
2141 return (is_or
2142 ? boolean_true_node
2143 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2144 else if (inner2 == false_test_var)
2145 return (is_or
2146 ? boolean_true_node
2147 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2149 /* Next, redistribute/reassociate the OR across the inner tests.
2150 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2151 if (TREE_CODE (inner1) == SSA_NAME
2152 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2153 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2154 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2155 gimple_assign_rhs1 (s),
2156 gimple_assign_rhs2 (s),
2157 code2, op2a, op2b)))
2159 /* Handle the OR case, where we are reassociating:
2160 (inner1 OR inner2) OR (op2a code2 op2b)
2161 => (t OR inner2)
2162 If the partial result t is a constant, we win. Otherwise
2163 continue on to try reassociating with the other inner test. */
2164 if (is_or)
2166 if (integer_onep (t))
2167 return boolean_true_node;
2168 else if (integer_zerop (t))
2169 return inner2;
2172 /* Handle the AND case, where we are redistributing:
2173 (inner1 AND inner2) OR (op2a code2 op2b)
2174 => (t AND (inner2 OR (op2a code op2b))) */
2175 else if (integer_zerop (t))
2176 return boolean_false_node;
2178 /* Save partial result for later. */
2179 partial = t;
2182 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2183 if (TREE_CODE (inner2) == SSA_NAME
2184 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2185 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2186 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2187 gimple_assign_rhs1 (s),
2188 gimple_assign_rhs2 (s),
2189 code2, op2a, op2b)))
2191 /* Handle the OR case, where we are reassociating:
2192 (inner1 OR inner2) OR (op2a code2 op2b)
2193 => (inner1 OR t)
2194 => (t OR partial) */
2195 if (is_or)
2197 if (integer_zerop (t))
2198 return inner1;
2199 else if (integer_onep (t))
2200 return boolean_true_node;
2201 /* If both are the same, we can apply the identity
2202 (x OR x) == x. */
2203 else if (partial && same_bool_result_p (t, partial))
2204 return t;
2207 /* Handle the AND case, where we are redistributing:
2208 (inner1 AND inner2) OR (op2a code2 op2b)
2209 => (t AND (inner1 OR (op2a code2 op2b)))
2210 => (t AND partial) */
2211 else
2213 if (integer_zerop (t))
2214 return boolean_false_node;
2215 else if (partial)
2217 /* We already got a simplification for the other
2218 operand to the redistributed AND expression. The
2219 interesting case is when at least one is true.
2220 Or, if both are the same, we can apply the identity
2221 (x AND x) == x. */
2222 if (integer_onep (partial))
2223 return t;
2224 else if (integer_onep (t))
2225 return partial;
2226 else if (same_bool_result_p (t, partial))
2227 return t;
/* No simplification was found.  */
2232 return NULL_TREE;
2235 /* Try to simplify the OR of two comparisons defined by
2236 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2237 If this can be done without constructing an intermediate value,
2238 return the resulting tree; otherwise NULL_TREE is returned.
2239 This function is deliberately asymmetric as it recurses on SSA_DEFs
2240 in the first comparison but not the second. */
2242 static tree
2243 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2244 enum tree_code code2, tree op2a, tree op2b)
2246 tree truth_type = truth_type_for (TREE_TYPE (op1a));
2248 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2249 if (operand_equal_p (op1a, op2a, 0)
2250 && operand_equal_p (op1b, op2b, 0))
2252 /* Result will be either NULL_TREE, or a combined comparison. */
2253 tree t = combine_comparisons (UNKNOWN_LOCATION,
2254 TRUTH_ORIF_EXPR, code1, code2,
2255 truth_type, op1a, op1b);
2256 if (t)
2257 return t;
2260 /* Likewise the swapped case of the above. */
2261 if (operand_equal_p (op1a, op2b, 0)
2262 && operand_equal_p (op1b, op2a, 0))
2264 /* Result will be either NULL_TREE, or a combined comparison. */
2265 tree t = combine_comparisons (UNKNOWN_LOCATION,
2266 TRUTH_ORIF_EXPR, code1,
2267 swap_tree_comparison (code2),
2268 truth_type, op1a, op1b);
2269 if (t)
2270 return t;
2273 /* If both comparisons are of the same value against constants, we might
2274 be able to merge them. */
2275 if (operand_equal_p (op1a, op2a, 0)
2276 && TREE_CODE (op1b) == INTEGER_CST
2277 && TREE_CODE (op2b) == INTEGER_CST)
2279 int cmp = tree_int_cst_compare (op1b, op2b);
2281 /* If we have (op1a != op1b), we should either be able to
2282 return that or TRUE, depending on whether the constant op1b
2283 also satisfies the other comparison against op2b. */
2284 if (code1 == NE_EXPR)
2286 bool done = true;
2287 bool val;
2288 switch (code2)
2290 case EQ_EXPR: val = (cmp == 0); break;
2291 case NE_EXPR: val = (cmp != 0); break;
2292 case LT_EXPR: val = (cmp < 0); break;
2293 case GT_EXPR: val = (cmp > 0); break;
2294 case LE_EXPR: val = (cmp <= 0); break;
2295 case GE_EXPR: val = (cmp >= 0); break;
2296 default: done = false;
2298 if (done)
2300 if (val)
2301 return boolean_true_node;
2302 else
2303 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2306 /* Likewise if the second comparison is a != comparison. */
2307 else if (code2 == NE_EXPR)
2309 bool done = true;
2310 bool val;
2311 switch (code1)
2313 case EQ_EXPR: val = (cmp == 0); break;
2314 case NE_EXPR: val = (cmp != 0); break;
2315 case LT_EXPR: val = (cmp > 0); break;
2316 case GT_EXPR: val = (cmp < 0); break;
2317 case LE_EXPR: val = (cmp >= 0); break;
2318 case GE_EXPR: val = (cmp <= 0); break;
2319 default: done = false;
2321 if (done)
2323 if (val)
2324 return boolean_true_node;
2325 else
2326 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2330 /* See if an equality test is redundant with the other comparison. */
2331 else if (code1 == EQ_EXPR)
2333 bool val;
2334 switch (code2)
2336 case EQ_EXPR: val = (cmp == 0); break;
2337 case NE_EXPR: val = (cmp != 0); break;
2338 case LT_EXPR: val = (cmp < 0); break;
2339 case GT_EXPR: val = (cmp > 0); break;
2340 case LE_EXPR: val = (cmp <= 0); break;
2341 case GE_EXPR: val = (cmp >= 0); break;
2342 default:
2343 val = false;
2345 if (val)
2346 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2348 else if (code2 == EQ_EXPR)
2350 bool val;
2351 switch (code1)
2353 case EQ_EXPR: val = (cmp == 0); break;
2354 case NE_EXPR: val = (cmp != 0); break;
2355 case LT_EXPR: val = (cmp > 0); break;
2356 case GT_EXPR: val = (cmp < 0); break;
2357 case LE_EXPR: val = (cmp >= 0); break;
2358 case GE_EXPR: val = (cmp <= 0); break;
2359 default:
2360 val = false;
2362 if (val)
2363 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2366 /* Choose the less restrictive of two < or <= comparisons. */
2367 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2368 && (code2 == LT_EXPR || code2 == LE_EXPR))
2370 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2371 return fold_build2 (code2, boolean_type_node, op2a, op2b)
2372 else
2373 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2376 /* Likewise choose the less restrictive of two > or >= comparisons. */
2377 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2378 && (code2 == GT_EXPR || code2 == GE_EXPR))
2380 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2381 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2382 else
2383 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2386 /* Check for singleton ranges. */
2387 else if (cmp == 0
2388 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2389 || (code1 == GT_EXPR && code2 == LT_EXPR)))
/* (x < c) OR (x > c) with equal constants is just (x != c).  */
2390 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2392 /* Check for less/greater pairs that don't restrict the range at all. */
2393 else if (cmp >= 0
2394 && (code1 == LT_EXPR || code1 == LE_EXPR)
2395 && (code2 == GT_EXPR || code2 == GE_EXPR))
2396 return boolean_true_node;
2397 else if (cmp <= 0
2398 && (code1 == GT_EXPR || code1 == GE_EXPR)
2399 && (code2 == LT_EXPR || code2 == LE_EXPR))
2400 return boolean_true_node;
2403 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2404 NAME's definition is a truth value. See if there are any simplifications
2405 that can be done against the NAME's definition. */
2406 if (TREE_CODE (op1a) == SSA_NAME
2407 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2408 && (integer_zerop (op1b) || integer_onep (op1b)))
2410 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2411 || (code1 == NE_EXPR && integer_onep (op1b)));
2412 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2413 switch (gimple_code (stmt))
2415 case GIMPLE_ASSIGN:
2416 /* Try to simplify by copy-propagating the definition. */
2417 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2419 case GIMPLE_PHI:
2420 /* If every argument to the PHI produces the same result when
2421 ORed with the second comparison, we win.
2422 Do not do this unless the type is bool since we need a bool
2423 result here anyway. */
2424 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2426 tree result = NULL_TREE;
2427 unsigned i;
2428 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2430 tree arg = gimple_phi_arg_def (stmt, i);
2432 /* If this PHI has itself as an argument, ignore it.
2433 If all the other args produce the same result,
2434 we're still OK. */
2435 if (arg == gimple_phi_result (stmt))
2436 continue;
2437 else if (TREE_CODE (arg) == INTEGER_CST)
2439 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2441 if (!result)
2442 result = boolean_true_node;
2443 else if (!integer_onep (result))
2444 return NULL_TREE;
2446 else if (!result)
2447 result = fold_build2 (code2, boolean_type_node,
2448 op2a, op2b);
2449 else if (!same_bool_comparison_p (result,
2450 code2, op2a, op2b))
2451 return NULL_TREE;
2453 else if (TREE_CODE (arg) == SSA_NAME
2454 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2456 tree temp;
2457 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2458 /* In simple cases we can look through PHI nodes,
2459 but we have to be careful with loops.
2460 See PR49073. */
2461 if (! dom_info_available_p (CDI_DOMINATORS)
2462 || gimple_bb (def_stmt) == gimple_bb (stmt)
2463 || dominated_by_p (CDI_DOMINATORS,
2464 gimple_bb (def_stmt),
2465 gimple_bb (stmt)))
2466 return NULL_TREE;
2467 temp = or_var_with_comparison (arg, invert, code2,
2468 op2a, op2b);
2469 if (!temp)
2470 return NULL_TREE;
2471 else if (!result)
2472 result = temp;
2473 else if (!same_bool_result_p (result, temp))
2474 return NULL_TREE;
2476 else
2477 return NULL_TREE;
2479 return result;
2482 default:
2483 break;
2486 return NULL_TREE;
2489 /* Try to simplify the OR of two comparisons, specified by
2490 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2491 If this can be simplified to a single expression (without requiring
2492 introducing more SSA variables to hold intermediate values),
2493 return the resulting tree. Otherwise return NULL_TREE.
2494 If the result expression is non-null, it has boolean type. */
2496 tree
2497 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2498 enum tree_code code2, tree op2a, tree op2b)
2500 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2501 if (t)
2502 return t;
2503 else
2504 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2508 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2510 Either NULL_TREE, a simplified but non-constant or a constant
2511 is returned.
2513 ??? This should go into a gimple-fold-inline.h file to be eventually
2514 privatized with the single valueize function used in the various TUs
2515 to avoid the indirect function call overhead. */
2517 tree
2518 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2520 location_t loc = gimple_location (stmt);
2521 switch (gimple_code (stmt))
2523 case GIMPLE_ASSIGN:
2525 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2527 switch (get_gimple_rhs_class (subcode))
2529 case GIMPLE_SINGLE_RHS:
2531 tree rhs = gimple_assign_rhs1 (stmt);
2532 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2534 if (TREE_CODE (rhs) == SSA_NAME)
2536 /* If the RHS is an SSA_NAME, return its known constant value,
2537 if any. */
2538 return (*valueize) (rhs);
2540 /* Handle propagating invariant addresses into address
2541 operations. */
2542 else if (TREE_CODE (rhs) == ADDR_EXPR
2543 && !is_gimple_min_invariant (rhs))
2545 HOST_WIDE_INT offset = 0;
2546 tree base;
2547 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2548 &offset,
2549 valueize);
2550 if (base
2551 && (CONSTANT_CLASS_P (base)
2552 || decl_address_invariant_p (base)))
2553 return build_invariant_address (TREE_TYPE (rhs),
2554 base, offset);
2556 else if (TREE_CODE (rhs) == CONSTRUCTOR
2557 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2558 && (CONSTRUCTOR_NELTS (rhs)
2559 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
/* A fully-populated vector CONSTRUCTOR folds to a VECTOR_CST
   when every valueized element is a constant.  */
2561 unsigned i;
2562 tree val, *vec;
2564 vec = XALLOCAVEC (tree,
2565 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
2566 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2568 val = (*valueize) (val);
2569 if (TREE_CODE (val) == INTEGER_CST
2570 || TREE_CODE (val) == REAL_CST
2571 || TREE_CODE (val) == FIXED_CST)
2572 vec[i] = val;
2573 else
2574 return NULL_TREE;
2577 return build_vector (TREE_TYPE (rhs), vec);
2579 if (subcode == OBJ_TYPE_REF)
2581 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
2582 /* If callee is constant, we can fold away the wrapper. */
2583 if (is_gimple_min_invariant (val))
2584 return val;
2587 if (kind == tcc_reference)
2589 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2590 || TREE_CODE (rhs) == REALPART_EXPR
2591 || TREE_CODE (rhs) == IMAGPART_EXPR)
2592 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2594 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2595 return fold_unary_loc (EXPR_LOCATION (rhs),
2596 TREE_CODE (rhs),
2597 TREE_TYPE (rhs), val);
2599 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2600 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2602 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2603 return fold_ternary_loc (EXPR_LOCATION (rhs),
2604 TREE_CODE (rhs),
2605 TREE_TYPE (rhs), val,
2606 TREE_OPERAND (rhs, 1),
2607 TREE_OPERAND (rhs, 2));
2609 else if (TREE_CODE (rhs) == MEM_REF
2610 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2612 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2613 if (TREE_CODE (val) == ADDR_EXPR
2614 && is_gimple_min_invariant (val))
/* Rewrite the MEM_REF on top of the invariant address and
   let fold_const_aggregate_ref_1 read from the initializer.  */
2616 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2617 unshare_expr (val),
2618 TREE_OPERAND (rhs, 1));
2619 if (tem)
2620 rhs = tem;
2623 return fold_const_aggregate_ref_1 (rhs, valueize);
2625 else if (kind == tcc_declaration)
2626 return get_symbol_constant_value (rhs);
2627 return rhs;
2630 case GIMPLE_UNARY_RHS:
2632 /* Handle unary operators that can appear in GIMPLE form.
2633 Note that we know the single operand must be a constant,
2634 so this should almost always return a simplified RHS. */
2635 tree lhs = gimple_assign_lhs (stmt);
2636 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2638 /* Conversions are useless for CCP purposes if they are
2639 value-preserving. Thus the restrictions that
2640 useless_type_conversion_p places for restrict qualification
2641 of pointer types should not apply here.
2642 Substitution later will only substitute to allowed places. */
2643 if (CONVERT_EXPR_CODE_P (subcode)
2644 && POINTER_TYPE_P (TREE_TYPE (lhs))
2645 && POINTER_TYPE_P (TREE_TYPE (op0))
2646 && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2647 == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2648 && TYPE_MODE (TREE_TYPE (lhs))
2649 == TYPE_MODE (TREE_TYPE (op0)))
2650 return op0;
2652 return
2653 fold_unary_ignore_overflow_loc (loc, subcode,
2654 gimple_expr_type (stmt), op0);
2657 case GIMPLE_BINARY_RHS:
2659 /* Handle binary operators that can appear in GIMPLE form. */
2660 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2661 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2663 /* Translate &x + CST into an invariant form suitable for
2664 further propagation. */
2665 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2666 && TREE_CODE (op0) == ADDR_EXPR
2667 && TREE_CODE (op1) == INTEGER_CST)
2669 tree off = fold_convert (ptr_type_node, op1);
2670 return build_fold_addr_expr_loc
2671 (loc,
2672 fold_build2 (MEM_REF,
2673 TREE_TYPE (TREE_TYPE (op0)),
2674 unshare_expr (op0), off));
2677 return fold_binary_loc (loc, subcode,
2678 gimple_expr_type (stmt), op0, op1);
2681 case GIMPLE_TERNARY_RHS:
2683 /* Handle ternary operators that can appear in GIMPLE form. */
2684 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2685 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2686 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2688 /* Fold embedded expressions in ternary codes. */
2689 if ((subcode == COND_EXPR
2690 || subcode == VEC_COND_EXPR)
2691 && COMPARISON_CLASS_P (op0))
2693 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2694 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2695 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2696 TREE_TYPE (op0), op00, op01);
2697 if (tem)
2698 op0 = tem;
2701 return fold_ternary_loc (loc, subcode,
2702 gimple_expr_type (stmt), op0, op1, op2);
2705 default:
2706 gcc_unreachable ();
2710 case GIMPLE_CALL:
2712 tree fn;
/* The UBSAN overflow-checking internal calls fold to a constant only
   when both operands are INTEGER_CSTs and the operation does not
   overflow.  */
2714 if (gimple_call_internal_p (stmt))
2716 enum tree_code subcode = ERROR_MARK;
2717 switch (gimple_call_internal_fn (stmt))
2719 case IFN_UBSAN_CHECK_ADD:
2720 subcode = PLUS_EXPR;
2721 break;
2722 case IFN_UBSAN_CHECK_SUB:
2723 subcode = MINUS_EXPR;
2724 break;
2725 case IFN_UBSAN_CHECK_MUL:
2726 subcode = MULT_EXPR;
2727 break;
2728 default:
2729 return NULL_TREE;
2731 tree op0 = (*valueize) (gimple_call_arg (stmt, 0));
2732 tree op1 = (*valueize) (gimple_call_arg (stmt, 1));
2734 if (TREE_CODE (op0) != INTEGER_CST
2735 || TREE_CODE (op1) != INTEGER_CST)
2736 return NULL_TREE;
2737 tree res = fold_binary_loc (loc, subcode,
2738 TREE_TYPE (gimple_call_arg (stmt, 0)),
2739 op0, op1);
2740 if (res
2741 && TREE_CODE (res) == INTEGER_CST
2742 && !TREE_OVERFLOW (res))
2743 return res;
2744 return NULL_TREE;
/* Otherwise, fold calls to built-in functions whose callee and all
   arguments valueize to invariants.  */
2747 fn = (*valueize) (gimple_call_fn (stmt));
2748 if (TREE_CODE (fn) == ADDR_EXPR
2749 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2750 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
2751 && gimple_builtin_call_types_compatible_p (stmt,
2752 TREE_OPERAND (fn, 0)))
2754 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2755 tree call, retval;
2756 unsigned i;
2757 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2758 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2759 call = build_call_array_loc (loc,
2760 gimple_call_return_type (stmt),
2761 fn, gimple_call_num_args (stmt), args);
2762 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2763 if (retval)
2765 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2766 STRIP_NOPS (retval);
2767 retval = fold_convert (gimple_call_return_type (stmt), retval);
2769 return retval;
2771 return NULL_TREE;
2774 default:
2775 return NULL_TREE;
2779 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2780 Returns NULL_TREE if folding to a constant is not possible, otherwise
2781 returns a constant according to is_gimple_min_invariant. */
2783 tree
2784 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2786 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2787 if (res && is_gimple_min_invariant (res))
2788 return res;
2789 return NULL_TREE;
2793 /* The following set of functions fold references using
2794 their constant initializers.  Forward declaration: the array and
record walkers below recurse back into fold_ctor_reference. */
2796 static tree fold_ctor_reference (tree type, tree ctor,
2797 unsigned HOST_WIDE_INT offset,
2798 unsigned HOST_WIDE_INT size, tree);
2800 /* See if we can find constructor defining value of BASE.
2801 When we know the constructor with constant offset (such as
2802 base is array[40] and we do know constructor of array), then
2803 BIT_OFFSET is adjusted accordingly.
2805 As a special case, return error_mark_node when constructor
2806 is not explicitly available, but it is known to be zero
2807 such as 'static const int a;'. */
2808 static tree
2809 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2810 tree (*valueize)(tree))
2812 HOST_WIDE_INT bit_offset2, size, max_size;
/* Strip a MEM_REF, accumulating its constant byte offset into
   *BIT_OFFSET; give up on offsets that do not fit a HWI.  */
2813 if (TREE_CODE (base) == MEM_REF)
2815 if (!integer_zerop (TREE_OPERAND (base, 1)))
2817 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
2818 return NULL_TREE;
2819 *bit_offset += (mem_ref_offset (base).low
2820 * BITS_PER_UNIT);
2823 if (valueize
2824 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
2825 base = valueize (TREE_OPERAND (base, 0));
2826 if (!base || TREE_CODE (base) != ADDR_EXPR)
2827 return NULL_TREE;
2828 base = TREE_OPERAND (base, 0);
2831 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2832 DECL_INITIAL. If BASE is a nested reference into another
2833 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2834 the inner reference. */
2835 switch (TREE_CODE (base))
2837 case VAR_DECL:
2838 case CONST_DECL:
2840 tree init = ctor_for_folding (base);
2842 /* Our semantic is exact opposite of ctor_for_folding;
2843 NULL means unknown, while error_mark_node is 0. */
2844 if (init == error_mark_node)
2845 return NULL_TREE;
2846 if (!init)
2847 return error_mark_node;
2848 return init;
2851 case ARRAY_REF:
2852 case COMPONENT_REF:
/* Only constant-position references are resolved; a variable
   position makes size != max_size.  */
2853 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2854 if (max_size == -1 || size != max_size)
2855 return NULL_TREE;
2856 *bit_offset += bit_offset2;
2857 return get_base_constructor (base, bit_offset, valueize);
2859 case STRING_CST:
2860 case CONSTRUCTOR:
2861 return base;
2863 default:
2864 return NULL_TREE;
2868 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2869 to the memory at bit OFFSET.
2871 We do only simple job of folding byte accesses. */
2873 static tree
2874 fold_string_cst_ctor_reference (tree type, tree ctor,
2875 unsigned HOST_WIDE_INT offset,
2876 unsigned HOST_WIDE_INT size)
2878 if (INTEGRAL_TYPE_P (type)
2879 && (TYPE_MODE (type)
2880 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2881 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2882 == MODE_INT)
2883 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2884 && size == BITS_PER_UNIT
2885 && !(offset % BITS_PER_UNIT))
2887 offset /= BITS_PER_UNIT;
2888 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2889 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2890 [offset]));
2891 /* Folding
2892 const char a[20]="hello";
2893 return a[10];
2895 might lead to offset greater than string length. In this case we
2896 know value is either initialized to 0 or out of bounds. Return 0
2897 in both cases. */
2898 return build_zero_cst (type);
2900 return NULL_TREE;
2903 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2904 SIZE to the memory at bit OFFSET. */
2906 static tree
2907 fold_array_ctor_reference (tree type, tree ctor,
2908 unsigned HOST_WIDE_INT offset,
2909 unsigned HOST_WIDE_INT size,
2910 tree from_decl)
2912 unsigned HOST_WIDE_INT cnt;
2913 tree cfield, cval;
2914 double_int low_bound, elt_size;
2915 double_int index, max_index;
2916 double_int access_index;
2917 tree domain_type = NULL_TREE, index_type = NULL_TREE;
2918 HOST_WIDE_INT inner_offset;
2920 /* Compute low bound and elt size. */
2921 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2922 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2923 if (domain_type && TYPE_MIN_VALUE (domain_type))
2925 /* Static constructors for variably sized objects make no sense. */
2926 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2927 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
2928 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2930 else
2931 low_bound = double_int_zero;
2932 /* Static constructors for variably sized objects make no sense. */
2933 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2934 == INTEGER_CST);
2935 elt_size =
2936 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2939 /* We can handle only constantly sized accesses that are known to not
2940 be larger than size of array element. */
2941 if (!TYPE_SIZE_UNIT (type)
2942 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2943 || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type))))
2944 return NULL_TREE;
2946 /* Compute the array index we look for. */
2947 access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
2948 .udiv (elt_size, TRUNC_DIV_EXPR);
2949 access_index += low_bound;
2950 if (index_type)
2951 access_index = access_index.ext (TYPE_PRECISION (index_type),
2952 TYPE_UNSIGNED (index_type));
2954 /* And offset within the access. */
2955 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
2957 /* See if the array field is large enough to span whole access. We do not
2958 care to fold accesses spanning multiple array indexes. */
2959 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
2960 return NULL_TREE;
/* Walk the constructor elements, tracking the running INDEX (and
   MAX_INDEX for RANGE_EXPRs) in the domain's precision.  */
2962 index = low_bound - double_int_one;
2963 if (index_type)
2964 index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));
2966 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2968 /* Array constructor might explicitly set index, or specify range
2969 or leave index NULL meaning that it is next index after previous
2970 one. */
2971 if (cfield)
2973 if (TREE_CODE (cfield) == INTEGER_CST)
2974 max_index = index = tree_to_double_int (cfield);
2975 else
2977 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2978 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2979 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2982 else
2984 index += double_int_one;
2985 if (index_type)
2986 index = index.ext (TYPE_PRECISION (index_type),
2987 TYPE_UNSIGNED (index_type));
2988 max_index = index;
2991 /* Do we have match? */
2992 if (access_index.cmp (index, 1) >= 0
2993 && access_index.cmp (max_index, 1) <= 0)
2994 return fold_ctor_reference (type, cval, inner_offset, size,
2995 from_decl);
2997 /* When memory is not explicitly mentioned in constructor,
2998 it is 0 (or out of range). */
2999 return build_zero_cst (type);
3002 /* CTOR is CONSTRUCTOR of an aggregate or vector.
3003 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
3005 static tree
3006 fold_nonarray_ctor_reference (tree type, tree ctor,
3007 unsigned HOST_WIDE_INT offset,
3008 unsigned HOST_WIDE_INT size,
3009 tree from_decl)
3011 unsigned HOST_WIDE_INT cnt;
3012 tree cfield, cval;
3014 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
3015 cval)
3017 tree byte_offset = DECL_FIELD_OFFSET (cfield);
3018 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
3019 tree field_size = DECL_SIZE (cfield);
3020 double_int bitoffset;
3021 double_int byte_offset_cst = tree_to_double_int (byte_offset);
3022 double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
3023 double_int bitoffset_end, access_end;
3025 /* Variable sized objects in static constructors makes no sense,
3026 but field_size can be NULL for flexible array members. */
3027 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
3028 && TREE_CODE (byte_offset) == INTEGER_CST
3029 && (field_size != NULL_TREE
3030 ? TREE_CODE (field_size) == INTEGER_CST
3031 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
3033 /* Compute bit offset of the field. */
3034 bitoffset = tree_to_double_int (field_offset)
3035 + byte_offset_cst * bits_per_unit_cst;
3036 /* Compute bit offset where the field ends. */
3037 if (field_size != NULL_TREE)
3038 bitoffset_end = bitoffset + tree_to_double_int (field_size);
3039 else
/* Flexible array member: no known end; the field_size == NULL_TREE
   tests below skip the end-based checks.  */
3040 bitoffset_end = double_int_zero;
3042 access_end = double_int::from_uhwi (offset)
3043 + double_int::from_uhwi (size);
3045 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
3046 [BITOFFSET, BITOFFSET_END)? */
3047 if (access_end.cmp (bitoffset, 0) > 0
3048 && (field_size == NULL_TREE
3049 || double_int::from_uhwi (offset).slt (bitoffset_end)))
3051 double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
3052 /* We do have overlap. Now see if field is large enough to
3053 cover the access. Give up for accesses spanning multiple
3054 fields. */
3055 if (access_end.cmp (bitoffset_end, 0) > 0)
3056 return NULL_TREE;
3057 if (double_int::from_uhwi (offset).slt (bitoffset))
3058 return NULL_TREE;
3059 return fold_ctor_reference (type, cval,
3060 inner_offset.to_uhwi (), size,
3061 from_decl);
3064 /* When memory is not explicitly mentioned in constructor, it is 0. */
3065 return build_zero_cst (type);
3068 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
3069 to the memory at bit OFFSET. */
3071 static tree
3072 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
3073 unsigned HOST_WIDE_INT size, tree from_decl)
3075 tree ret;
3077 /* We found the field with exact match. */
3078 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
3079 && !offset)
3080 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
3082 /* We are at the end of walk, see if we can view convert the
3083 result. */
3084 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
3085 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3086 && operand_equal_p (TYPE_SIZE (type),
3087 TYPE_SIZE (TREE_TYPE (ctor)), 0))
3089 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
3090 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
3091 if (ret)
3092 STRIP_NOPS (ret);
3093 return ret;
3095 if (TREE_CODE (ctor) == STRING_CST)
3096 return fold_string_cst_ctor_reference (type, ctor, offset, size);
3097 if (TREE_CODE (ctor) == CONSTRUCTOR)
3100 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
3101 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
3102 return fold_array_ctor_reference (type, ctor, offset, size,
3103 from_decl);
3104 else
3105 return fold_nonarray_ctor_reference (type, ctor, offset, size,
3106 from_decl);
3109 return NULL_TREE;
3112 /* Return the tree representing the element referenced by T if T is an
3113 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
3114 names using VALUEIZE. Return NULL_TREE otherwise. */
3116 tree
3117 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
3119 tree ctor, idx, base;
3120 HOST_WIDE_INT offset, size, max_size;
3121 tree tem;
3123 if (TREE_THIS_VOLATILE (t))
3124 return NULL_TREE;
3126 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3127 return get_symbol_constant_value (t);
3129 tem = fold_read_from_constant_string (t);
3130 if (tem)
3131 return tem;
3133 switch (TREE_CODE (t))
3135 case ARRAY_REF:
3136 case ARRAY_RANGE_REF:
3137 /* Constant indexes are handled well by get_base_constructor.
3138 Only special case variable offsets.
3139 FIXME: This code can't handle nested references with variable indexes
3140 (they will be handled only by iteration of ccp). Perhaps we can bring
3141 get_ref_base_and_extent here and make it use a valueize callback. */
3142 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3143 && valueize
3144 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3145 && TREE_CODE (idx) == INTEGER_CST)
3147 tree low_bound, unit_size;
3148 double_int doffset;
3150 /* If the resulting bit-offset is constant, track it. */
3151 if ((low_bound = array_ref_low_bound (t),
3152 TREE_CODE (low_bound) == INTEGER_CST)
3153 && (unit_size = array_ref_element_size (t),
3154 tree_fits_uhwi_p (unit_size))
3155 && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
3156 .sext (TYPE_PRECISION (TREE_TYPE (idx))),
3157 doffset.fits_shwi ()))
3159 offset = doffset.to_shwi ();
3160 offset *= tree_to_uhwi (unit_size);
3161 offset *= BITS_PER_UNIT;
3163 base = TREE_OPERAND (t, 0);
3164 ctor = get_base_constructor (base, &offset, valueize);
3165 /* Empty constructor. Always fold to 0. */
3166 if (ctor == error_mark_node)
3167 return build_zero_cst (TREE_TYPE (t));
3168 /* Out of bound array access. Value is undefined,
3169 but don't fold. */
3170 if (offset < 0)
3171 return NULL_TREE;
3172 /* We can not determine ctor. */
3173 if (!ctor)
3174 return NULL_TREE;
3175 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3176 tree_to_uhwi (unit_size)
3177 * BITS_PER_UNIT,
3178 base);
3181 /* Fallthru. */
3183 case COMPONENT_REF:
3184 case BIT_FIELD_REF:
3185 case TARGET_MEM_REF:
3186 case MEM_REF:
3187 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3188 ctor = get_base_constructor (base, &offset, valueize);
3190 /* Empty constructor. Always fold to 0. */
3191 if (ctor == error_mark_node)
3192 return build_zero_cst (TREE_TYPE (t));
3193 /* We do not know precise address. */
3194 if (max_size == -1 || max_size != size)
3195 return NULL_TREE;
3196 /* We can not determine ctor. */
3197 if (!ctor)
3198 return NULL_TREE;
3200 /* Out of bound array access. Value is undefined, but don't fold. */
3201 if (offset < 0)
3202 return NULL_TREE;
3204 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
3205 base);
3207 case REALPART_EXPR:
3208 case IMAGPART_EXPR:
/* Fold real/imag parts of a constant complex value.  */
3210 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3211 if (c && TREE_CODE (c) == COMPLEX_CST)
3212 return fold_build1_loc (EXPR_LOCATION (t),
3213 TREE_CODE (t), TREE_TYPE (t), c);
3214 break;
3217 default:
3218 break;
3221 return NULL_TREE;
/* Convenience wrapper: fold a constant aggregate reference T without
   any SSA-name valueization callback.  */
3224 tree
3225 fold_const_aggregate_ref (tree t)
3227 return fold_const_aggregate_ref_1 (t, NULL);
3230 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3231 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3232 KNOWN_BINFO carries the binfo describing the true type of
3233 OBJ_TYPE_REF_OBJECT(REF). Returns NULL_TREE when the target cannot
   be determined or cannot be referenced from the current unit. */
3235 tree
3236 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3238 unsigned HOST_WIDE_INT offset, size;
3239 tree v, fn, vtable, init;
3241 vtable = v = BINFO_VTABLE (known_binfo);
3242 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
3243 if (!v)
3244 return NULL_TREE;
 /* BINFO_VTABLE may be &vtable or &vtable + constant byte offset;
    strip the optional offset, remembering it in bits.  */
3246 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3248 offset = tree_to_uhwi (TREE_OPERAND (v, 1)) * BITS_PER_UNIT;
3249 v = TREE_OPERAND (v, 0);
3251 else
3252 offset = 0;
3254 if (TREE_CODE (v) != ADDR_EXPR)
3255 return NULL_TREE;
3256 v = TREE_OPERAND (v, 0);
 /* Only an actual virtual-table variable can be folded here.  */
3258 if (TREE_CODE (v) != VAR_DECL
3259 || !DECL_VIRTUAL_P (v))
3260 return NULL_TREE;
3261 init = ctor_for_folding (v);
3263 /* The virtual tables should always be born with constructors,
3264 and we always should assume that they are available for
3265 folding. At the moment we do not stream them in all cases,
3266 but it should never happen that the ctor seems unreachable. */
3267 gcc_assert (init);
3268 if (init == error_mark_node)
3270 gcc_assert (in_lto_p);
3271 return NULL_TREE;
3273 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
 /* Each vtable slot is one element of the table's array type;
    TOKEN selects the slot, giving the final bit offset.  */
3274 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
3275 offset += token * size;
3276 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
3277 offset, size, v);
3278 if (!fn || integer_zerop (fn))
3279 return NULL_TREE;
3280 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3281 || TREE_CODE (fn) == FDESC_EXPR)
3282 fn = TREE_OPERAND (fn, 0);
3283 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3285 /* When cgraph node is missing and function is not public, we cannot
3286 devirtualize. This can happen in WHOPR when the actual method
3287 ends up in other partition, because we found devirtualization
3288 possibility too late. */
3289 if (!can_refer_decl_in_current_unit_p (fn, vtable))
3290 return NULL_TREE;
3292 /* Make sure we create a cgraph node for functions we'll reference.
3293 They can be non-existent if the reference comes from an entry
3294 of an external vtable for example. */
3295 cgraph_get_create_node (fn);
3297 return fn;
3300 /* Return true iff VAL is a gimple expression that is known to be
3301 non-negative. Restricted to floating-point inputs. */
3303 bool
3304 gimple_val_nonnegative_real_p (tree val)
3306 gimple def_stmt;
3308 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3310 /* Use existing logic for non-gimple trees. */
3311 if (tree_expr_nonnegative_p (val))
3312 return true;
3314 if (TREE_CODE (val) != SSA_NAME)
3315 return false;
3317 /* Currently we look only at the immediately defining statement
3318 to make this determination, since recursion on defining
3319 statements of operands can lead to quadratic behavior in the
3320 worst case. This is expected to catch almost all occurrences
3321 in practice. It would be possible to implement limited-depth
3322 recursion if important cases are lost. Alternatively, passes
3323 that need this information (such as the pow/powi lowering code
3324 in the cse_sincos pass) could be revised to provide it through
3325 dataflow propagation. */
3327 def_stmt = SSA_NAME_DEF_STMT (val);
3329 if (is_gimple_assign (def_stmt))
3331 tree op0, op1;
3333 /* See fold-const.c:tree_expr_nonnegative_p for additional
3334 cases that could be handled with recursion. */
3336 switch (gimple_assign_rhs_code (def_stmt))
3338 case ABS_EXPR:
3339 /* Always true for floating-point operands. */
3340 return true;
3342 case MULT_EXPR:
3343 /* True if the two operands are identical (since we are
3344 restricted to floating-point inputs). */
3345 op0 = gimple_assign_rhs1 (def_stmt);
3346 op1 = gimple_assign_rhs2 (def_stmt);
3348 if (op0 == op1
3349 || operand_equal_p (op0, op1, 0))
3350 return true;
 /* Otherwise fall through to the default and give up.  */
3352 default:
3353 return false;
3356 else if (is_gimple_call (def_stmt))
3358 tree fndecl = gimple_call_fndecl (def_stmt);
3359 if (fndecl
3360 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3362 tree arg1;
3364 switch (DECL_FUNCTION_CODE (fndecl))
 /* These math builtins have a nonnegative range by definition.  */
3366 CASE_FLT_FN (BUILT_IN_ACOS):
3367 CASE_FLT_FN (BUILT_IN_ACOSH):
3368 CASE_FLT_FN (BUILT_IN_CABS):
3369 CASE_FLT_FN (BUILT_IN_COSH):
3370 CASE_FLT_FN (BUILT_IN_ERFC):
3371 CASE_FLT_FN (BUILT_IN_EXP):
3372 CASE_FLT_FN (BUILT_IN_EXP10):
3373 CASE_FLT_FN (BUILT_IN_EXP2):
3374 CASE_FLT_FN (BUILT_IN_FABS):
3375 CASE_FLT_FN (BUILT_IN_FDIM):
3376 CASE_FLT_FN (BUILT_IN_HYPOT):
3377 CASE_FLT_FN (BUILT_IN_POW10):
3378 return true;
3380 CASE_FLT_FN (BUILT_IN_SQRT):
3381 /* sqrt yields a nonnegative result for all valid inputs, except
3382 that sqrt(-0.0) is -0.0; so only answer true when signed
3382 zeros need not be honored. */
3383 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3384 return true;
3386 break;
3388 CASE_FLT_FN (BUILT_IN_POWI):
3389 /* True if the second argument is an even integer. */
3390 arg1 = gimple_call_arg (def_stmt, 1);
3392 if (TREE_CODE (arg1) == INTEGER_CST
3393 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3394 return true;
3396 break;
3398 CASE_FLT_FN (BUILT_IN_POW):
3399 /* True if the second argument is an even integer-valued
3400 real. */
3401 arg1 = gimple_call_arg (def_stmt, 1);
3403 if (TREE_CODE (arg1) == REAL_CST)
3405 REAL_VALUE_TYPE c;
3406 HOST_WIDE_INT n;
3408 c = TREE_REAL_CST (arg1);
3409 n = real_to_integer (&c);
3411 if ((n & 1) == 0)
3413 REAL_VALUE_TYPE cint;
3414 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
 /* Verify the constant is exactly the integer N, i.e. it had
    no fractional part and the conversion did not overflow.  */
3415 if (real_identical (&c, &cint))
3416 return true;
3420 break;
3422 default:
3423 return false;
3428 return false;
3431 /* Given a pointer value OP0, return a simplified version of an
3432 indirection through OP0, or NULL_TREE if no simplification is
3433 possible. Note that the resulting type may be different from
3434 the type pointed to in the sense that it is still compatible
3435 from the langhooks point of view. */
3437 tree
3438 gimple_fold_indirect_ref (tree t)
3440 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
3441 tree sub = t;
3442 tree subtype;
3444 STRIP_NOPS (sub);
3445 subtype = TREE_TYPE (sub);
3446 if (!POINTER_TYPE_P (subtype))
3447 return NULL_TREE;
 /* Case 1: dereferencing the address of an object.  */
3449 if (TREE_CODE (sub) == ADDR_EXPR)
3451 tree op = TREE_OPERAND (sub, 0);
3452 tree optype = TREE_TYPE (op);
3453 /* *&p => p */
3454 if (useless_type_conversion_p (type, optype))
3455 return op;
3457 /* *(foo *)&fooarray => fooarray[0] */
3458 if (TREE_CODE (optype) == ARRAY_TYPE
3459 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
3460 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3462 tree type_domain = TYPE_DOMAIN (optype);
3463 tree min_val = size_zero_node;
3464 if (type_domain && TYPE_MIN_VALUE (type_domain))
3465 min_val = TYPE_MIN_VALUE (type_domain);
3466 if (TREE_CODE (min_val) == INTEGER_CST)
3467 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3469 /* *(foo *)&complexfoo => __real__ complexfoo */
3470 else if (TREE_CODE (optype) == COMPLEX_TYPE
3471 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3472 return fold_build1 (REALPART_EXPR, type, op);
3473 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
3474 else if (TREE_CODE (optype) == VECTOR_TYPE
3475 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3477 tree part_width = TYPE_SIZE (type);
3478 tree index = bitsize_int (0);
3479 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
3483 /* Case 2: *(p + CST) -> ... */
3484 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
3485 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
3487 tree addr = TREE_OPERAND (sub, 0);
3488 tree off = TREE_OPERAND (sub, 1);
3489 tree addrtype;
3491 STRIP_NOPS (addr);
3492 addrtype = TREE_TYPE (addr);
3494 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
3495 if (TREE_CODE (addr) == ADDR_EXPR
3496 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
3497 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
3498 && tree_fits_uhwi_p (off))
3500 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
3501 tree part_width = TYPE_SIZE (type);
3502 unsigned HOST_WIDE_INT part_widthi
3503 = tree_to_shwi (part_width) / BITS_PER_UNIT;
3504 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
3505 tree index = bitsize_int (indexi);
 /* Only fold when the accessed element lies within the vector.  */
3506 if (offset / part_widthi
3507 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
3508 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
3509 part_width, index);
3512 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
3513 if (TREE_CODE (addr) == ADDR_EXPR
3514 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
3515 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
3517 tree size = TYPE_SIZE_UNIT (type);
 /* The offset must select exactly the imaginary part.  */
3518 if (tree_int_cst_equal (size, off))
3519 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
3522 /* *(p + CST) -> MEM_REF <p, CST>. */
3523 if (TREE_CODE (addr) != ADDR_EXPR
3524 || DECL_P (TREE_OPERAND (addr, 0)))
3525 return fold_build2 (MEM_REF, type,
3526 addr,
3527 build_int_cst_wide (ptype,
3528 TREE_INT_CST_LOW (off),
3529 TREE_INT_CST_HIGH (off)));
3532 /* Case 3: *(foo *)fooarrptr => (*fooarrptr)[0] */
3533 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
3534 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
3535 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
3537 tree type_domain;
3538 tree min_val = size_zero_node;
3539 tree osub = sub;
 /* Recurse to simplify the inner dereference first.  */
3540 sub = gimple_fold_indirect_ref (sub);
3541 if (! sub)
3542 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
3543 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
3544 if (type_domain && TYPE_MIN_VALUE (type_domain))
3545 min_val = TYPE_MIN_VALUE (type_domain);
3546 if (TREE_CODE (min_val) == INTEGER_CST)
3547 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
3550 return NULL_TREE;
3553 /* Return true if CODE is an operation that when operating on signed
3554 integer types involves undefined behavior on overflow and the
3555 operation can be expressed with unsigned arithmetic. */
3557 bool
3558 arith_code_with_undefined_signed_overflow (tree_code code)
3560 switch (code)
3562 case PLUS_EXPR:
3563 case MINUS_EXPR:
3564 case MULT_EXPR:
3565 case NEGATE_EXPR:
3566 case POINTER_PLUS_EXPR:
3567 return true;
3568 default:
3569 return false;
3573 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
3574 operation that can be transformed to unsigned arithmetic by converting
3575 its operand, carrying out the operation in the corresponding unsigned
3576 type and converting the result back to the original type.
3578 Returns a sequence of statements that replace STMT and also contain
3579 a modified form of STMT itself. */
3581 gimple_seq
3582 rewrite_to_defined_overflow (gimple stmt)
3584 if (dump_file && (dump_flags & TDF_DETAILS))
3586 fprintf (dump_file, "rewriting stmt with undefined signed "
3587 "overflow ");
3588 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3591 tree lhs = gimple_assign_lhs (stmt);
3592 tree type = unsigned_type_for (TREE_TYPE (lhs));
3593 gimple_seq stmts = NULL;
 /* Convert every operand (ops 1 and up; op 0 is the lhs) to the
    unsigned type, collecting any conversion statements first.  */
3594 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
3596 gimple_seq stmts2 = NULL;
3597 gimple_set_op (stmt, i,
3598 force_gimple_operand (fold_convert (type,
3599 gimple_op (stmt, i)),
3600 &stmts2, true, NULL_TREE));
3601 gimple_seq_add_seq (&stmts, stmts2);
 /* Compute into a fresh unsigned SSA name; the original LHS is
    re-established by the conversion statement added below.  */
3603 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
 /* Pointer arithmetic becomes plain unsigned addition.  */
3604 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
3605 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
3606 gimple_seq_add_stmt (&stmts, stmt);
 /* Convert the unsigned result back to the original type of LHS.  */
3607 gimple cvt = gimple_build_assign_with_ops
3608 (NOP_EXPR, lhs, gimple_assign_lhs (stmt), NULL_TREE);
3609 gimple_seq_add_stmt (&stmts, cvt);
3611 return stmts;