/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2014 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stringpool.h"
#include "expr.h"
#include "stmt.h"
#include "stor-layout.h"
#include "flags.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dumpfile.h"
#include "bitmap.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "target.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "output.h"
#include "tree-eh.h"
#include "gimple-match.h"
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to other compilation unit.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to a reference
        to a method that was partitioned elsewhere.
        In this case we have static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred to by external vtables that
        we devirtualize only during final compilation stage.
        At this time we already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred to only if they were not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
         static objects are defined.  */
      if (symtab->function_flags_ready)
        return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
        return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->definition)
      || (flag_ltrans
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When the function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are
     output in units where they are used and when the other unit was
     compiled with LTO it is possible that vtable was kept public while
     the function itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
          && (!snode->in_other_partition
              || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
   it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */
tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        ptr,
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
        {
          base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
          if (base)
            TREE_OPERAND (cval, 0) = base;
        }
      else
        base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
        return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
           || TREE_CODE (base) == FUNCTION_DECL)
          && !can_refer_decl_in_current_unit_p (base, from_decl))
        return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
        TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
        {
          /* Make sure we create a cgraph node for functions we'll reference.
             They can be non-existent if the reference comes from an entry
             of an external vtable for example.  */
          cgraph_node::get_create (base);
        }
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */
tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
        {
          val = canonicalize_constructor_val (unshare_expr (val), sym);
          if (val && is_gimple_min_invariant (val))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
              || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
        return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */
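/* For instance (an added, illustrative note, not part of the original
   comment): a load such as  tmp = REALPART_EXPR <__complex__ (3.0, 4.0)>
   folds via fold_unary_loc to the constant 3.0, and a BIT_FIELD_REF of a
   VECTOR_CST folds to the selected element, provided the result is a
   valid gimple invariant.  */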
static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
                           TREE_CODE (expr),
                           TREE_TYPE (expr),
                           TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0),
                             TREE_OPERAND (expr, 1),
                             TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */
static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
          {
            tree val = OBJ_TYPE_REF_EXPR (rhs);
            if (is_gimple_min_invariant (val))
              return val;
            else if (flag_devirtualize && virtual_method_call_p (rhs))
              {
                bool final;
                vec <cgraph_node *>targets
                  = possible_polymorphic_call_targets (rhs, stmt, &final);
                if (final && targets.length () <= 1 && dbg_cnt (devirt))
                  {
                    if (dump_enabled_p ())
                      {
                        location_t loc = gimple_location_safe (stmt);
                        dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                                         "resolving virtual function address "
                                         "reference to function %s\n",
                                         targets.length () == 1
                                         ? targets[0]->name ()
                                         : "NULL");
                      }
                    if (targets.length () == 1)
                      {
                        val = fold_convert (TREE_TYPE (val),
                                            build_fold_addr_expr_loc
                                              (loc, targets[0]->decl));
                        STRIP_USELESS_TYPE_CONVERSION (val);
                      }
                    else
                      /* We cannot use __builtin_unreachable here because it
                         cannot have its address taken.  */
                      val = build_int_cst (TREE_TYPE (val), 0);
                    return val;
                  }
              }
          }

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
            if (tem
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
            else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                 && (CONSTRUCTOR_NELTS (rhs)
                     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (TREE_CODE (val) != INTEGER_CST
                  && TREE_CODE (val) != REAL_CST
                  && TREE_CODE (val) != FIXED_CST)
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return get_symbol_constant_value (rhs);

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;

        return NULL_TREE;
      }
      break;
    case GIMPLE_UNARY_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
        if (result)
          {
            /* If the operation was a conversion do _not_ mark a
               resulting constant with TREE_OVERFLOW if the original
               constant was not.  These conversions have implementation
               defined behavior and retaining the TREE_OVERFLOW flag
               here would confuse later passes such as VRP.  */
            if (CONVERT_EXPR_CODE_P (subcode)
                && TREE_CODE (result) == INTEGER_CST
                && TREE_CODE (rhs) == INTEGER_CST)
              TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

            STRIP_USELESS_TYPE_CONVERSION (result);
            if (valid_gimple_rhs_p (result))
              return result;
          }
      }
      break;
    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
         X == 0, X == 1, X != 0, and X != 1.  */
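      /* Illustrative summary (an added note, not part of the original
         comment): for a boolean-typed X this turns
           tmp = (X == 1)  or  tmp = (X != 0)   into   tmp = X
           tmp = (X == 0)  or  tmp = (X != 1)   into   tmp = ~X    (one-bit
                                                type) or tmp = X ^ 1
                                                (otherwise).  */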
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
          || gimple_assign_rhs_code (stmt) == NE_EXPR)
        {
          tree lhs = gimple_assign_lhs (stmt);
          tree op1 = gimple_assign_rhs1 (stmt);
          tree op2 = gimple_assign_rhs2 (stmt);
          tree type = TREE_TYPE (op1);

          /* Check whether the comparison operands are of the same boolean
             type as the result type is.
             Check that second operand is an integer-constant with value
             one or zero.  */
          if (TREE_CODE (op2) == INTEGER_CST
              && (integer_zerop (op2) || integer_onep (op2))
              && useless_type_conversion_p (TREE_TYPE (lhs), type))
            {
              enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
              bool is_logical_not = false;

              /* X == 0 and X != 1 is a logical-not of X
                 X == 1 and X != 0 is X  */
              if ((cmp_code == EQ_EXPR && integer_zerop (op2))
                  || (cmp_code == NE_EXPR && integer_onep (op2)))
                is_logical_not = true;

              if (is_logical_not == false)
                result = op1;
              /* Only for one-bit precision typed X the transformation
                 !X -> ~X is valid.  */
              else if (TYPE_PRECISION (type) == 1)
                result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
                                     type, op1);
              /* Otherwise we use !X -> X ^ 1.  */
              else
                result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
                                     type, op1, build_int_cst (type, 1));
            }
        }

      if (!result)
        result = fold_binary_loc (loc, subcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;
    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
        {
          tree op0 = gimple_assign_rhs1 (stmt);
          tree tem;
          bool set = false;
          location_t cond_loc = gimple_location (stmt);

          if (COMPARISON_CLASS_P (op0))
            {
              fold_defer_overflow_warnings ();
              tem = fold_binary_loc (cond_loc,
                                     TREE_CODE (op0), TREE_TYPE (op0),
                                     TREE_OPERAND (op0, 0),
                                     TREE_OPERAND (op0, 1));
              /* This is actually a conditional expression, not a GIMPLE
                 conditional statement, however, the valid_gimple_rhs_p
                 test still applies.  */
              set = (tem && is_gimple_condexpr (tem)
                     && valid_gimple_rhs_p (tem));
              fold_undefer_overflow_warnings (set, stmt, 0);
            }
          else if (is_gimple_min_invariant (op0))
            {
              tem = op0;
              set = true;
            }
          else
            return NULL_TREE;

          if (set)
            result = fold_build3_loc (cond_loc, COND_EXPR,
                                      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
                                      gimple_assign_rhs2 (stmt),
                                      gimple_assign_rhs3 (stmt));
        }

      if (!result)
        result = fold_ternary_loc (loc, subcode,
                                   TREE_TYPE (gimple_assign_lhs (stmt)),
                                   gimple_assign_rhs1 (stmt),
                                   gimple_assign_rhs2 (stmt),
                                   gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */
static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
                                 gimple_cond_code (stmt),
                                 boolean_type_node,
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement statements' locations and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */
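/* Sketch of the virtual operand rewiring performed below (an added,
   illustrative note): if the original statement had VUSE .MEM_1 and
   VDEF .MEM_2 and the replacement sequence contains two stores, the
   first store receives VUSE .MEM_1 and a fresh VDEF .MEM_3, while the
   last store receives VUSE .MEM_3 and the original VDEF .MEM_2, so no
   downstream virtual operands need updating.  */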
static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
           && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          || (is_gimple_call (new_stmt)
              && (gimple_call_flags (new_stmt)
                  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
        {
          tree vdef;
          if (!laststore)
            vdef = gimple_vdef (stmt);
          else
            vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
          gimple_set_vdef (new_stmt, vdef);
          if (vdef && TREE_CODE (vdef) == SSA_NAME)
            SSA_NAME_DEF_STMT (vdef) = new_stmt;
          laststore = new_stmt;
        }
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
         name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
        reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
          && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */
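/* For example (an added, illustrative note): folding
     res = strlen ("abc")
   into the constant 3 would gimplify the constant into a temporary
   assigned before the call and replace the GIMPLE_CALL itself with
   res = tmp, keeping the virtual operand chain intact.  */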
void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          pop_gimplify_context (NULL);
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_replace (si_p, gimple_build_nop (), true);
          return;
        }
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
                                       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
/* Replace the call at *GSI with the gimple value VAL.  */
static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
        val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, true);
}
/* Replace the call at *GSI with the new call REPL and fold that
   again.  */
static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      gimple_set_vuse (repl, gimple_vuse (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  gsi_replace (gsi, repl, true);
  fold_stmt (gsi);
}
/* Return true if VAR is a VAR_DECL or a component thereof.  */
static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}
/* Fold function call to builtin mem{{,p}cpy,move}.  Return false if no
   simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */
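/* Illustrative examples of the ENDP encoding (an added note):
     memcpy (d, s, n)   is folded with ENDP == 0, the result is D
     mempcpy (d, s, n)  is folded with ENDP == 1, the result is D + N
     stpcpy-style uses  ENDP == 2, the result is D + N - 1
     memmove (d, s, n)  is folded with ENDP == 3, D and S may overlap.  */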
static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
                               tree dest, tree src, int endp)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      gimple repl;
      if (gimple_call_lhs (stmt))
        repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
        repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
      gsi_replace (gsi, repl, true);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
        {
          gsi_replace (gsi, gimple_build_nop (), true);
          return true;
        }
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
                                                         ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
         and then all stores inline it that way.  Currently efficiently
         means that we can load all the memory into a single integer
         register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
          && compare_tree_int (len, MOVE_MAX) <= 0
          /* ???  Don't transform copies from strings with known length;
             this confuses tree-ssa-strlen.c.  This doesn't handle
             the case in gcc.dg/strlenopt-8.c which is XFAILed for that
             reason.  */
          && !c_strlen (src, 2))
        {
          unsigned ilen = tree_to_uhwi (len);
          if (exact_log2 (ilen) != -1)
            {
              tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
              if (type
                  && TYPE_MODE (type) != BLKmode
                  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
                      == ilen * 8)
                  /* If the destination pointer is not aligned we must be able
                     to emit an unaligned store.  */
                  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
                      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
                {
                  tree srctype = type;
                  tree desttype = type;
                  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
                    srctype = build_aligned_type (type, src_align);
                  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
                  tree tem = fold_const_aggregate_ref (srcmem);
                  if (tem)
                    srcmem = tem;
                  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
                           && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
                                                     src_align))
                    srcmem = NULL_TREE;
                  if (srcmem)
                    {
                      gimple new_stmt;
                      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
                        {
                          new_stmt = gimple_build_assign (NULL_TREE, srcmem);
                          if (gimple_in_ssa_p (cfun))
                            srcmem = make_ssa_name (TREE_TYPE (srcmem),
                                                    new_stmt);
                          else
                            srcmem = create_tmp_reg (TREE_TYPE (srcmem),
                                                     NULL);
                          gimple_assign_set_lhs (new_stmt, srcmem);
                          gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
                        desttype = build_aligned_type (type, dest_align);
                      new_stmt
                        = gimple_build_assign (fold_build2 (MEM_REF, desttype,
                                                            dest, off0),
                                               srcmem);
                      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
                      if (gimple_vdef (new_stmt)
                          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
                        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
                      if (!lhs)
                        {
                          gsi_replace (gsi, new_stmt, true);
                          return true;
                        }
                      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                      goto done;
                    }
                }
            }
        }

      if (endp == 3)
        {
          /* Both DEST and SRC must be pointer types.
             ??? This is what old code did.  Is the testing for pointer types
             really mandatory?

             If either SRC is readonly or length is 1, we can use memcpy.  */
          if (!dest_align || !src_align)
            return false;
          if (readonly_data_expr (src)
              || (tree_fits_uhwi_p (len)
                  && (MIN (src_align, dest_align) / BITS_PER_UNIT
                      >= tree_to_uhwi (len))))
            {
              tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If *src and *dest can't overlap, optimize into memcpy as well.  */
          if (TREE_CODE (src) == ADDR_EXPR
              && TREE_CODE (dest) == ADDR_EXPR)
            {
              tree src_base, dest_base, fn;
              HOST_WIDE_INT src_offset = 0, dest_offset = 0;
              HOST_WIDE_INT size = -1;
              HOST_WIDE_INT maxsize = -1;

              srcvar = TREE_OPERAND (src, 0);
              src_base = get_ref_base_and_extent (srcvar, &src_offset,
                                                  &size, &maxsize);
              destvar = TREE_OPERAND (dest, 0);
              dest_base = get_ref_base_and_extent (destvar, &dest_offset,
                                                   &size, &maxsize);
              if (tree_fits_uhwi_p (len))
                maxsize = tree_to_uhwi (len);
              else
                maxsize = -1;
              src_offset /= BITS_PER_UNIT;
              dest_offset /= BITS_PER_UNIT;
              if (SSA_VAR_P (src_base)
                  && SSA_VAR_P (dest_base))
                {
                  if (operand_equal_p (src_base, dest_base, 0)
                      && ranges_overlap_p (src_offset, maxsize,
                                           dest_offset, maxsize))
                    return false;
                }
              else if (TREE_CODE (src_base) == MEM_REF
                       && TREE_CODE (dest_base) == MEM_REF)
                {
                  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
                                         TREE_OPERAND (dest_base, 0), 0))
                    return false;
                  offset_int off = mem_ref_offset (src_base) + src_offset;
                  if (!wi::fits_shwi_p (off))
                    return false;
                  src_offset = off.to_shwi ();

                  off = mem_ref_offset (dest_base) + dest_offset;
                  if (!wi::fits_shwi_p (off))
                    return false;
                  dest_offset = off.to_shwi ();
                  if (ranges_overlap_p (src_offset, maxsize,
                                        dest_offset, maxsize))
                    return false;
                }
              else
                return false;

              fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If the destination and source do not alias optimize into
             memcpy as well.  */
          if ((is_gimple_min_invariant (dest)
               || TREE_CODE (dest) == SSA_NAME)
              && (is_gimple_min_invariant (src)
                  || TREE_CODE (src) == SSA_NAME))
            {
              ao_ref destr, srcr;
              ao_ref_init_from_ptr_and_size (&destr, dest, len);
              ao_ref_init_from_ptr_and_size (&srcr, src, len);
              if (!refs_may_alias_p_1 (&destr, &srcr, false))
                {
                  tree fn;
                  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
                  if (!fn)
                    return false;
                  gimple_call_set_fndecl (stmt, fn);
                  gimple_call_set_arg (stmt, 0, dest);
                  gimple_call_set_arg (stmt, 1, src);
                  fold_stmt (gsi);
                  return true;
                }
            }

          return false;
        }

      if (!tree_fits_shwi_p (len))
        return false;
      /* FIXME:
         This logic loses for arguments like (type *)malloc (sizeof (type)),
         since we strip the casts of up to VOID return value from malloc.
         Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
          || !POINTER_TYPE_P (TREE_TYPE (dest)))
        return false;
      /* In the following try to find a type that is most natural to be
         used for the memcpy source and destination and that allows
         the most optimization when memcpy is turned into a plain assignment
         using that type.  In theory we could always use a char[len] type
         but that only gains us that the destination and source possibly
         no longer will have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
        {
          tree tem = TREE_OPERAND (src, 0);
          STRIP_NOPS (tem);
          if (tem != TREE_OPERAND (src, 0))
            src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
        }
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
        {
          tree tem = TREE_OPERAND (dest, 0);
          STRIP_NOPS (tem);
          if (tem != TREE_OPERAND (dest, 0))
            dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
        }
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        {
          srctype = TREE_TYPE (srctype);
          STRIP_NOPS (src);
          src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
        }
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        {
          desttype = TREE_TYPE (desttype);
          STRIP_NOPS (dest);
          dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
        }
      if (TREE_ADDRESSABLE (srctype)
          || TREE_ADDRESSABLE (desttype))
        return false;

      /* Make sure we are not copying using a floating-point mode or
         a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
          || TREE_CODE (desttype) == BOOLEAN_TYPE
          || TREE_CODE (desttype) == ENUMERAL_TYPE)
        desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
          || TREE_CODE (srctype) == BOOLEAN_TYPE
          || TREE_CODE (srctype) == ENUMERAL_TYPE)
        srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
        srctype = desttype;
      if (!desttype)
        desttype = srctype;
      if (!srctype)
        return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
          || src_align < TYPE_ALIGN (srctype))
        return false;

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (destvar, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
        destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (srcvar, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        {
          if (!destvar
              || src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
                                  srcvar, off0);
          else if (!STRICT_ALIGNMENT)
            {
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
            }
          else
            srcvar = NULL_TREE;
        }
      else
        srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
        return false;

      if (srcvar == NULL_TREE)
        {
          STRIP_NOPS (src);
          if (src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, desttype, src, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, src, off0);
            }
        }
      else if (destvar == NULL_TREE)
        {
          STRIP_NOPS (dest);
          if (dest_align >= TYPE_ALIGN (srctype))
            destvar = fold_build2 (MEM_REF, srctype, dest, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
                                             dest_align);
              destvar = fold_build2 (MEM_REF, desttype, dest, off0);
            }
        }

      gimple new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
        {
          new_stmt = gimple_build_assign (NULL_TREE, srcvar);
          if (gimple_in_ssa_p (cfun))
            srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
          else
            srcvar = create_tmp_reg (TREE_TYPE (srcvar), NULL);
          gimple_assign_set_lhs (new_stmt, srcvar);
          gimple_set_vuse (new_stmt, gimple_vuse (stmt));
          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
        }
      new_stmt = gimple_build_assign (destvar, srcvar);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
        {
          gsi_replace (gsi, new_stmt, true);
          return true;
        }
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
                           ssize_int (1));
  if (endp == 2 || endp == 1)
    dest = fold_build_pointer_plus_loc (loc, dest, len);

  dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
                                   GSI_SAME_STMT);
  gimple repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, true);
  return true;
}
/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to C.  Return whether a simplification was made.  */
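/* For instance (an added, illustrative note): with a 4-byte, suitably
   aligned destination,  memset (&i, 0xab, 4)  can be folded into the
   single store  i = 0xabababab;  the store's constant is built by the
   byte-replication code below.  */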
static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
        return false;

      /* Replicate the low byte across the whole word, e.g. 0xab becomes
         0xabab...ab.  The last shift is split in two so the shift count
         stays valid even when HOST_WIDE_INT is only 32 bits wide.  */
      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, true);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
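/* Illustrative examples of the TYPE parameter (an added note): for
   arg = PHI <"ab", "cdef">  a query with TYPE == 0 fails because the
   lengths 2 and 4 differ, TYPE == 1 yields the maximum length 4, and
   TYPE == 2 treats ARG as an integer and computes its maximum value
   instead of a string length.  */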
static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
          && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
        /* The RHS of the statement defining VAR must either have a
           constant length or come from another SSA_NAME with a constant
           length.  */
        if (gimple_assign_single_p (def_stmt)
            || gimple_assign_unary_nop_p (def_stmt))
          {
            tree rhs = gimple_assign_rhs1 (def_stmt);
            return get_maxval_strlen (rhs, length, visited, type);
          }
        else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
          {
            tree op2 = gimple_assign_rhs2 (def_stmt);
            tree op3 = gimple_assign_rhs3 (def_stmt);
            return get_maxval_strlen (op2, length, visited, type)
                   && get_maxval_strlen (op3, length, visited, type);
          }
        return false;

      case GIMPLE_PHI:
        {
          /* All the arguments of the PHI node must have the same constant
             length.  */
          unsigned i;

          for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
            {
              tree arg = gimple_phi_arg (def_stmt, i)->def;

              /* If this PHI has itself as an argument, we cannot
                 determine the string length of this argument.  However,
                 if we can find a constant string length for the other
                 PHI args then we can still be sure that this is a
                 constant string length.  So be optimistic and just
                 continue with the next argument.  */
              if (arg == gimple_phi_result (def_stmt))
                continue;

              if (!get_maxval_strlen (arg, length, visited, type))
                return false;
            }
        }
        return true;

      default:
        return false;
    }
}
/* Public wrapper around the worker above; returns the determined length
   or value of ARG for the given TYPE, or NULL_TREE if unknown.  */

tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len = NULL_TREE;
  if (!get_maxval_strlen (arg, &len, &visited, type))
    len = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len;
}
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   The length of the string to be copied is taken from SRC when it can
   be determined.  Return false if no simplification can be made.  */
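/* For example (an added, illustrative note): when the source length is
   known,  strcpy (d, s)  with strlen (s) == 3 becomes
   memcpy (d, s, 4), copying the terminating NUL as well.  */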
static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
                            tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
                                  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   The length of the source string, when it can be determined, bounds the
   transformation.  Return false if no simplification can be made.  */
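/* For example (an added, illustrative note):
     strncpy (d, "ab", 4)    is not folded here, because the required
                             zero padding of the tail would be lost;
     strncpy (d, "abcd", 4)  becomes  memcpy (d, "abcd", 4)  since the
                             source (including its NUL) is at least as
                             long as LEN.  */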
static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
                             tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
                                  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return false if no simplification was possible; otherwise replace the
   call in place with a simpler equivalent (the DST value when SRC is
   empty, or a strlen/memcpy sequence computing the same result) and
   return true.  */
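/* For example (an added, illustrative note): when strlen (SRC) is known
   to be 3,  strcat (d, s)  becomes

     tmp = strlen (d);
     memcpy (d + tmp, s, 4);

   copying the terminating NUL along with the string.  */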
static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node, NULL);
  else
    newdst = create_tmp_reg (size_type_node, NULL);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
                        build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
         stmt iterator to the memcpy call.
         ???  We can't use gsi_for_stmt as that doesn't work when the
         CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
/* Fold a call to the __strcat_chk builtin.  DEST, SRC, and SIZE are
   the arguments to the call.  */
static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */
static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
          && tree_fits_uhwi_p (src_len)
          && tree_fits_uhwi_p (len)
          && ! tree_int_cst_lt (len, src_len))
        {
          /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
          fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
          if (!fn)
            return false;

          gimple repl = gimple_build_call (fn, 3, dest, src, size);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  The transformation is only done when the return
   value of the call is unused.  Return false if no simplification
   was possible.  */
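/* Illustrative examples (an added note):
     fputs ("", f)    the call is deleted (its value is unused here)
     fputs ("h", f)   becomes  fputc ('h', f)
     fputs ("hi", f)  becomes  fwrite ("hi", 1, 2, f)  when optimizing
                      for speed.  */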
static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
                           tree arg0, tree arg1,
                           bool unlocked)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
                         ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
                         : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
                          ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
                          : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
        const char *p = c_getstr (arg0);
        if (p != NULL)
          {
            if (!fn_fputc)
              return false;

            gimple repl = gimple_build_call (fn_fputc, 2,
                                             build_int_cst
                                               (integer_type_node, p[0]), arg1);
            replace_call_with_call_and_fold (gsi, repl);
            return true;
          }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
        /* If optimizing for size keep fputs.  */
        if (optimize_function_for_size_p (cfun))
          return false;
        /* New argument list transforming fputs(string, stream) to
           fwrite(string, 1, len, stream).  */
        if (!fn_fwrite)
          return false;

        gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
                                         size_one_node, len, arg1);
        replace_call_with_call_and_fold (gsi, repl);
        return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.  FCODE is
   the BUILT_IN_* code of the builtin.  When LEN is not constant, the
   known maximum value of LEN is used to validate the transformation
   against SIZE.  */
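/* For example (an added, illustrative note): with a known object size,
     __memcpy_chk (d, s, 16, 32)
   becomes  memcpy (d, s, 16)  because LEN 16 can never exceed SIZE 32,
   and a SIZE of (size_t)-1 (unknown) drops the check unconditionally.  */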
static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
                                tree dest, tree src, tree len, tree size,
                                enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
        {
          replace_call_with_value (gsi, dest);
          return true;
        }
      else
        {
          tree temp = fold_build_pointer_plus_loc (loc, dest, len);
          temp = force_gimple_operand_gsi (gsi, temp,
                                           false, NULL_TREE, true,
                                           GSI_SAME_STMT);
          replace_call_with_value (gsi, temp);
          return true;
        }
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            {
              if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
                {
                  /* (void) __mempcpy_chk () can be optimized into
                     (void) __memcpy_chk ().  */
                  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
                  if (!fn)
                    return false;

                  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
                  replace_call_with_call_and_fold (gsi, repl);
                  return true;
                }
              return false;
            }
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.  FCODE is the
   BUILT_IN_* code of the builtin.  When the length of SRC is not
   constant, its known maximum length is used to validate the
   transformation against SIZE.  */
static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
                                tree dest,
                                tree src, tree size,
                                enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            {
              if (fcode == BUILT_IN_STPCPY_CHK)
                {
                  if (! ignore)
                    return false;

                  /* If return value of __stpcpy_chk is ignored,
                     optimize into __strcpy_chk.  */
                  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
                  if (!fn)
                    return false;

                  gimple repl = gimple_build_call (fn, 3, dest, src, size);
                  replace_call_with_call_and_fold (gsi, repl);
                  return true;
                }

              if (! len || TREE_SIDE_EFFECTS (len))
                return false;

              /* If c_strlen returned something, but not a constant,
                 transform __strcpy_chk into __memcpy_chk.  */
              fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
              if (!fn)
                return false;

              len = fold_convert_loc (loc, size_type_node, len);
              len = size_binop_loc (loc, PLUS_EXPR, len,
                                    build_int_cst (size_type_node, 1));
              len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
                                              true, GSI_SAME_STMT);
              gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
              replace_call_with_call_and_fold (gsi, repl);
              return true;
            }
        }
      else
        maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
        return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
                              ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1967 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1968 are the arguments to the call. FCODE is the BUILT_IN_* code of the
1969 builtin. The return value of the call is treated as ignored when the
1970 statement has no LHS. Return true if the call was simplified. */
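/* Illustrative example (hypothetical values): since 4 <= 16,
     __strncpy_chk (d, s, 4, 16)
   folds to
     strncpy (d, s, 4)
   and a __stpncpy_chk whose result is unused is first downgraded to
   __strncpy_chk, as the two differ only in their return value. */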
1972 static bool
1973 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1974 tree dest, tree src,
1975 tree len, tree size,
1976 enum built_in_function fcode)
1978 gimple stmt = gsi_stmt (*gsi);
1979 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1980 tree fn;
1982 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
1984 /* If return value of __stpncpy_chk is ignored,
1985 optimize into __strncpy_chk. */
1986 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
1987 if (fn)
1989 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1990 replace_call_with_call_and_fold (gsi, repl);
1991 return true;
1995 if (! tree_fits_uhwi_p (size))
1996 return false;
1998 tree maxlen = get_maxval_strlen (len, 2);
1999 if (! integer_all_onesp (size))
2001 if (! tree_fits_uhwi_p (len))
2003 /* If LEN is not constant, try MAXLEN too.
2004 For MAXLEN only allow optimizing into non-_ocs function
2005 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2006 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2007 return false;
2009 else
2010 maxlen = len;
2012 if (tree_int_cst_lt (size, maxlen))
2013 return false;
2016 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2017 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2018 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2019 if (!fn)
2020 return false;
2022 gimple repl = gimple_build_call (fn, 3, dest, src, len);
2023 replace_call_with_call_and_fold (gsi, repl);
2024 return true;
2027 /* Fold a call to __{,v}snprintf_chk at *GSI. Return false if a
2028 normal call should be emitted rather than expanding the function
2029 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2030 BUILT_IN_VSNPRINTF_CHK. */
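/* Illustrative example (hypothetical values): with flag 0 and the known
   object size 32 at least as large as the buffer length 10,
     __snprintf_chk (d, 10, 0, 32, "%s", s)
   folds to
     snprintf (d, 10, "%s", s)
   by dropping the flag and size arguments and retargeting the call. */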
2033 static bool
2034 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2035 enum built_in_function fcode)
2037 gimple stmt = gsi_stmt (*gsi);
2038 tree dest, size, len, fn, fmt, flag;
2039 const char *fmt_str;
2041 /* Verify the required arguments in the original call. */
2042 if (gimple_call_num_args (stmt) < 5)
2043 return false;
2045 dest = gimple_call_arg (stmt, 0);
2046 len = gimple_call_arg (stmt, 1);
2047 flag = gimple_call_arg (stmt, 2);
2048 size = gimple_call_arg (stmt, 3);
2049 fmt = gimple_call_arg (stmt, 4);
2051 if (! tree_fits_uhwi_p (size))
2052 return false;
2054 if (! integer_all_onesp (size))
2056 tree maxlen = get_maxval_strlen (len, 2);
2057 if (! tree_fits_uhwi_p (len))
2059 /* If LEN is not constant, try MAXLEN too.
2060 For MAXLEN only allow optimizing into non-_ocs function
2061 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2062 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2063 return false;
2065 else
2066 maxlen = len;
2068 if (tree_int_cst_lt (size, maxlen))
2069 return false;
2072 if (!init_target_chars ())
2073 return false;
2075 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2076 or if format doesn't contain % chars or is "%s". */
2077 if (! integer_zerop (flag))
2079 fmt_str = c_getstr (fmt);
2080 if (fmt_str == NULL)
2081 return false;
2082 if (strchr (fmt_str, target_percent) != NULL
2083 && strcmp (fmt_str, target_percent_s))
2084 return false;
2087 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2088 available. */
2089 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2090 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2091 if (!fn)
2092 return false;
2094 /* Replace the called function, and shrink the first 5 arguments to 3,
2095 retaining the trailing varargs. */
2096 gimple_call_set_fndecl (stmt, fn);
2097 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2098 gimple_call_set_arg (stmt, 0, dest);
2099 gimple_call_set_arg (stmt, 1, len);
2100 gimple_call_set_arg (stmt, 2, fmt);
2101 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2102 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2103 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2104 fold_stmt (gsi);
2105 return true;
2108 /* Fold a call to __{,v}sprintf_chk at *GSI. Return false if a
2109 normal call should be emitted rather than expanding the function
2110 inline. FCODE is either BUILT_IN_SPRINTF_CHK
2111 or BUILT_IN_VSPRINTF_CHK. */
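/* Illustrative example (hypothetical values): for a format with no '%'
   directives and flag 0,
     __sprintf_chk (d, 0, 16, "hi")
   folds to
     sprintf (d, "hi")
   because the output length strlen ("hi") == 2 is known to fit in 16. */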
2113 static bool
2114 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2115 enum built_in_function fcode)
2117 gimple stmt = gsi_stmt (*gsi);
2118 tree dest, size, len, fn, fmt, flag;
2119 const char *fmt_str;
2120 unsigned nargs = gimple_call_num_args (stmt);
2122 /* Verify the required arguments in the original call. */
2123 if (nargs < 4)
2124 return false;
2125 dest = gimple_call_arg (stmt, 0);
2126 flag = gimple_call_arg (stmt, 1);
2127 size = gimple_call_arg (stmt, 2);
2128 fmt = gimple_call_arg (stmt, 3);
2130 if (! tree_fits_uhwi_p (size))
2131 return false;
2133 len = NULL_TREE;
2135 if (!init_target_chars ())
2136 return false;
2138 /* Check whether the format is a literal string constant. */
2139 fmt_str = c_getstr (fmt);
2140 if (fmt_str != NULL)
2142 /* If the format doesn't contain % args or %%, we know the size. */
2143 if (strchr (fmt_str, target_percent) == 0)
2145 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2146 len = build_int_cstu (size_type_node, strlen (fmt_str));
2148 /* If the format is "%s" and first ... argument is a string literal,
2149 we know the size too. */
2150 else if (fcode == BUILT_IN_SPRINTF_CHK
2151 && strcmp (fmt_str, target_percent_s) == 0)
2153 tree arg;
2155 if (nargs == 5)
2157 arg = gimple_call_arg (stmt, 4);
2158 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2160 len = c_strlen (arg, 1);
2161 if (! len || ! tree_fits_uhwi_p (len))
2162 len = NULL_TREE;
2168 if (! integer_all_onesp (size))
2170 if (! len || ! tree_int_cst_lt (len, size))
2171 return false;
2174 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2175 or if format doesn't contain % chars or is "%s". */
2176 if (! integer_zerop (flag))
2178 if (fmt_str == NULL)
2179 return false;
2180 if (strchr (fmt_str, target_percent) != NULL
2181 && strcmp (fmt_str, target_percent_s))
2182 return false;
2185 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2186 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2187 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2188 if (!fn)
2189 return false;
2191 /* Replace the called function, and shrink the first 4 arguments to 2,
2192 retaining the trailing varargs. */
2193 gimple_call_set_fndecl (stmt, fn);
2194 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2195 gimple_call_set_arg (stmt, 0, dest);
2196 gimple_call_set_arg (stmt, 1, fmt);
2197 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2198 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2199 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2200 fold_stmt (gsi);
2201 return true;
2204 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2205 ORIG may be null if this is a 2-argument call. We don't attempt to
2206 simplify calls with more than 3 arguments.
2208 Return true if a simplification was made; the value returned by the
2209 original call is only materialized when the statement has an LHS. */
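/* Two illustrative rewrites (hypothetical names; lengths assumed known):
     n = sprintf (buf, "hello");     becomes   strcpy (buf, "hello"); n = 5;
     n = sprintf (buf, "%s", src);   becomes   strcpy (buf, src); n = len;
   where len is the compile-time length of the string in SRC. */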
2212 static bool
2213 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2215 gimple stmt = gsi_stmt (*gsi);
2216 tree dest = gimple_call_arg (stmt, 0);
2217 tree fmt = gimple_call_arg (stmt, 1);
2218 tree orig = NULL_TREE;
2219 const char *fmt_str = NULL;
2221 /* Verify the required arguments in the original call. We deal with two
2222 types of sprintf() calls: 'sprintf (str, fmt)' and
2223 'sprintf (dest, "%s", orig)'. */
2224 if (gimple_call_num_args (stmt) > 3)
2225 return false;
2227 if (gimple_call_num_args (stmt) == 3)
2228 orig = gimple_call_arg (stmt, 2);
2230 /* Check whether the format is a literal string constant. */
2231 fmt_str = c_getstr (fmt);
2232 if (fmt_str == NULL)
2233 return false;
2235 if (!init_target_chars ())
2236 return false;
2238 /* If the format doesn't contain % args or %%, use strcpy. */
2239 if (strchr (fmt_str, target_percent) == NULL)
2241 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2243 if (!fn)
2244 return false;
2246 /* Don't optimize sprintf (buf, "abc", ptr++). */
2247 if (orig)
2248 return false;
2250 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2251 'format' is known to contain no % formats. */
2252 gimple_seq stmts = NULL;
2253 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2254 gimple_seq_add_stmt_without_update (&stmts, repl);
2255 if (gimple_call_lhs (stmt))
2257 repl = gimple_build_assign (gimple_call_lhs (stmt),
2258 build_int_cst (integer_type_node,
2259 strlen (fmt_str)));
2260 gimple_seq_add_stmt_without_update (&stmts, repl);
2261 gsi_replace_with_seq_vops (gsi, stmts);
2262 /* gsi now points at the assignment to the lhs, get a
2263 stmt iterator to the strcpy call.
2264 ??? We can't use gsi_for_stmt as that doesn't work when the
2265 CFG isn't built yet. */
2266 gimple_stmt_iterator gsi2 = *gsi;
2267 gsi_prev (&gsi2);
2268 fold_stmt (&gsi2);
2270 else
2272 gsi_replace_with_seq_vops (gsi, stmts);
2273 fold_stmt (gsi);
2275 return true;
2278 /* If the format is "%s", use strcpy; if the result is used, this
   also requires the length of ORIG to be known. */
2279 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2281 tree fn;
2282 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2284 if (!fn)
2285 return false;
2287 /* Don't crash on sprintf (str1, "%s"). */
2288 if (!orig)
2289 return false;
2291 tree orig_len = NULL_TREE;
2292 if (gimple_call_lhs (stmt))
2294 orig_len = get_maxval_strlen (orig, 0);
2295 if (!orig_len)
2296 return false;
2299 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2300 gimple_seq stmts = NULL;
2301 gimple repl = gimple_build_call (fn, 2, dest, orig);
2302 gimple_seq_add_stmt_without_update (&stmts, repl);
2303 if (gimple_call_lhs (stmt))
2305 if (!useless_type_conversion_p (integer_type_node,
2306 TREE_TYPE (orig_len)))
2307 orig_len = fold_convert (integer_type_node, orig_len);
2308 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2309 gimple_seq_add_stmt_without_update (&stmts, repl);
2310 gsi_replace_with_seq_vops (gsi, stmts);
2311 /* gsi now points at the assignment to the lhs, get a
2312 stmt iterator to the strcpy call.
2313 ??? We can't use gsi_for_stmt as that doesn't work when the
2314 CFG isn't built yet. */
2315 gimple_stmt_iterator gsi2 = *gsi;
2316 gsi_prev (&gsi2);
2317 fold_stmt (&gsi2);
2319 else
2321 gsi_replace_with_seq_vops (gsi, stmts);
2322 fold_stmt (gsi);
2324 return true;
2326 return false;
2329 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2330 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2331 attempt to simplify calls with more than 4 arguments.
2333 Return true if a simplification was made; the value returned by the
2334 original call is only materialized when the statement has an LHS. */
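/* Illustrative example (hypothetical values): because strlen ("abc") == 3
   is strictly less than the destination size 8, no truncation can occur
   and
     snprintf (buf, 8, "abc")
   folds to
     strcpy (buf, "abc")
   with the result, if used, replaced by the constant 3. */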
2337 static bool
2338 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2340 gimple stmt = gsi_stmt (*gsi);
2341 tree dest = gimple_call_arg (stmt, 0);
2342 tree destsize = gimple_call_arg (stmt, 1);
2343 tree fmt = gimple_call_arg (stmt, 2);
2344 tree orig = NULL_TREE;
2345 const char *fmt_str = NULL;
2347 if (gimple_call_num_args (stmt) > 4)
2348 return false;
2350 if (gimple_call_num_args (stmt) == 4)
2351 orig = gimple_call_arg (stmt, 3);
2353 if (!tree_fits_uhwi_p (destsize))
2354 return false;
2355 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2357 /* Check whether the format is a literal string constant. */
2358 fmt_str = c_getstr (fmt);
2359 if (fmt_str == NULL)
2360 return false;
2362 if (!init_target_chars ())
2363 return false;
2365 /* If the format doesn't contain % args or %%, use strcpy. */
2366 if (strchr (fmt_str, target_percent) == NULL)
2368 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2369 if (!fn)
2370 return false;
2372 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2373 if (orig)
2374 return false;
2376 /* We could expand this as
2377 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2378 or to
2379 memcpy (str, fmt_with_nul_at_cstm1, cst);
2380 but in the former case that might increase code size
2381 and in the latter case grow .rodata section too much.
2382 So punt for now. */
2383 size_t len = strlen (fmt_str);
2384 if (len >= destlen)
2385 return false;
2387 gimple_seq stmts = NULL;
2388 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2389 gimple_seq_add_stmt_without_update (&stmts, repl);
2390 if (gimple_call_lhs (stmt))
2392 repl = gimple_build_assign (gimple_call_lhs (stmt),
2393 build_int_cst (integer_type_node, len));
2394 gimple_seq_add_stmt_without_update (&stmts, repl);
2395 gsi_replace_with_seq_vops (gsi, stmts);
2396 /* gsi now points at the assignment to the lhs, get a
2398 stmt iterator to the strcpy call.
2398 ??? We can't use gsi_for_stmt as that doesn't work when the
2399 CFG isn't built yet. */
2400 gimple_stmt_iterator gsi2 = *gsi;
2401 gsi_prev (&gsi2);
2402 fold_stmt (&gsi2);
2404 else
2406 gsi_replace_with_seq_vops (gsi, stmts);
2407 fold_stmt (gsi);
2409 return true;
2412 /* If the format is "%s", use strcpy; if the result is used, this
   also requires the length of ORIG to be known. */
2413 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2415 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2416 if (!fn)
2417 return false;
2419 /* Don't crash on snprintf (str1, cst, "%s"). */
2420 if (!orig)
2421 return false;
2423 tree orig_len = get_maxval_strlen (orig, 0);
2424 if (!orig_len)
2425 return false;
2427 /* We could expand this as
2428 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2429 or to
2430 memcpy (str1, str2_with_nul_at_cstm1, cst);
2431 but in the former case that might increase code size
2432 and in the latter case grow .rodata section too much.
2433 So punt for now. */
2434 if (compare_tree_int (orig_len, destlen) >= 0)
2435 return false;
2437 /* Convert snprintf (str1, cst, "%s", str2) into
2438 strcpy (str1, str2) if strlen (str2) < cst. */
2439 gimple_seq stmts = NULL;
2440 gimple repl = gimple_build_call (fn, 2, dest, orig);
2441 gimple_seq_add_stmt_without_update (&stmts, repl);
2442 if (gimple_call_lhs (stmt))
2444 if (!useless_type_conversion_p (integer_type_node,
2445 TREE_TYPE (orig_len)))
2446 orig_len = fold_convert (integer_type_node, orig_len);
2447 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2448 gimple_seq_add_stmt_without_update (&stmts, repl);
2449 gsi_replace_with_seq_vops (gsi, stmts);
2450 /* gsi now points at the assignment to the lhs, get a
2451 stmt iterator to the strcpy call.
2452 ??? We can't use gsi_for_stmt as that doesn't work when the
2453 CFG isn't built yet. */
2454 gimple_stmt_iterator gsi2 = *gsi;
2455 gsi_prev (&gsi2);
2456 fold_stmt (&gsi2);
2458 else
2460 gsi_replace_with_seq_vops (gsi, stmts);
2461 fold_stmt (gsi);
2463 return true;
2465 return false;
2469 /* Fold a call to __builtin_strlen when the length of the argument string is known. */
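/* Illustrative example (hypothetical names): if get_maxval_strlen
   determines that S always holds a 4-character string,
     n = __builtin_strlen (s);
   is replaced by
     n = 4;  */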
2471 static bool
2472 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2474 gimple stmt = gsi_stmt (*gsi);
2475 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2476 if (!len)
2477 return false;
2478 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2479 replace_call_with_value (gsi, len);
2480 return true;
2484 /* Fold the non-target builtin at *GSI and return whether any simplification
2485 was made. */
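/* For instance (illustrative), the dispatch below rewrites the legacy
   BSD interfaces in terms of their ISO C equivalents:
     bzero (p, n)      is folded as   memset (p, 0, n)
     bcopy (s, d, n)   is folded as   memmove (d, s, n)
   before the generic builtin folder is tried as a fallback. */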
2487 static bool
2488 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2490 gimple stmt = gsi_stmt (*gsi);
2491 tree callee = gimple_call_fndecl (stmt);
2493 /* Give up for always_inline inline builtins until they are
2494 inlined. */
2495 if (avoid_folding_inline_builtin (callee))
2496 return false;
2498 switch (DECL_FUNCTION_CODE (callee))
2500 case BUILT_IN_BZERO:
2501 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2502 gimple_call_arg (stmt, 1));
2503 case BUILT_IN_MEMSET:
2504 return gimple_fold_builtin_memset (gsi,
2505 gimple_call_arg (stmt, 1),
2506 gimple_call_arg (stmt, 2));
2507 case BUILT_IN_BCOPY:
2508 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2509 gimple_call_arg (stmt, 0), 3);
2510 case BUILT_IN_MEMCPY:
2511 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2512 gimple_call_arg (stmt, 1), 0);
2513 case BUILT_IN_MEMPCPY:
2514 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2515 gimple_call_arg (stmt, 1), 1);
2516 case BUILT_IN_MEMMOVE:
2517 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2518 gimple_call_arg (stmt, 1), 3);
2519 case BUILT_IN_SPRINTF_CHK:
2520 case BUILT_IN_VSPRINTF_CHK:
2521 return gimple_fold_builtin_sprintf_chk (gsi, DECL_FUNCTION_CODE (callee));
2522 case BUILT_IN_STRCAT_CHK:
2523 return gimple_fold_builtin_strcat_chk (gsi);
2524 case BUILT_IN_STRNCAT_CHK:
2525 return gimple_fold_builtin_strncat_chk (gsi);
2526 case BUILT_IN_STRLEN:
2527 return gimple_fold_builtin_strlen (gsi);
2528 case BUILT_IN_STRCPY:
2529 return gimple_fold_builtin_strcpy (gsi,
2530 gimple_call_arg (stmt, 0),
2531 gimple_call_arg (stmt, 1));
2532 case BUILT_IN_STRNCPY:
2533 return gimple_fold_builtin_strncpy (gsi,
2534 gimple_call_arg (stmt, 0),
2535 gimple_call_arg (stmt, 1),
2536 gimple_call_arg (stmt, 2));
2537 case BUILT_IN_STRCAT:
2538 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2539 gimple_call_arg (stmt, 1));
2540 case BUILT_IN_FPUTS:
2541 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2542 gimple_call_arg (stmt, 1), false);
2543 case BUILT_IN_FPUTS_UNLOCKED:
2544 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2545 gimple_call_arg (stmt, 1), true);
2546 case BUILT_IN_MEMCPY_CHK:
2547 case BUILT_IN_MEMPCPY_CHK:
2548 case BUILT_IN_MEMMOVE_CHK:
2549 case BUILT_IN_MEMSET_CHK:
2550 return gimple_fold_builtin_memory_chk (gsi,
2551 gimple_call_arg (stmt, 0),
2552 gimple_call_arg (stmt, 1),
2553 gimple_call_arg (stmt, 2),
2554 gimple_call_arg (stmt, 3),
2555 DECL_FUNCTION_CODE (callee));
2556 case BUILT_IN_STRCPY_CHK:
2557 case BUILT_IN_STPCPY_CHK:
2558 return gimple_fold_builtin_stxcpy_chk (gsi,
2559 gimple_call_arg (stmt, 0),
2560 gimple_call_arg (stmt, 1),
2561 gimple_call_arg (stmt, 2),
2562 DECL_FUNCTION_CODE (callee));
2563 case BUILT_IN_STRNCPY_CHK:
2564 case BUILT_IN_STPNCPY_CHK:
2565 return gimple_fold_builtin_stxncpy_chk (gsi,
2566 gimple_call_arg (stmt, 0),
2567 gimple_call_arg (stmt, 1),
2568 gimple_call_arg (stmt, 2),
2569 gimple_call_arg (stmt, 3),
2570 DECL_FUNCTION_CODE (callee));
2571 case BUILT_IN_SNPRINTF_CHK:
2572 case BUILT_IN_VSNPRINTF_CHK:
2573 return gimple_fold_builtin_snprintf_chk (gsi,
2574 DECL_FUNCTION_CODE (callee));
2575 case BUILT_IN_SNPRINTF:
2576 return gimple_fold_builtin_snprintf (gsi);
2577 case BUILT_IN_SPRINTF:
2578 return gimple_fold_builtin_sprintf (gsi);
2579 default:;
2582 /* Try the generic builtin folder. */
2583 bool ignore = (gimple_call_lhs (stmt) == NULL);
2584 tree result = fold_call_stmt (stmt, ignore);
2585 if (result)
2587 if (ignore)
2588 STRIP_NOPS (result);
2589 else
2590 result = fold_convert (gimple_call_return_type (stmt), result);
2591 if (!update_call_from_tree (gsi, result))
2592 gimplify_and_update_call_from_tree (gsi, result);
2593 return true;
2596 return false;
2599 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2600 The statement may be replaced by another statement, e.g., if the call
2601 simplifies to a constant value. Return true if any changes were made.
2602 It is assumed that the operands have been previously folded. */
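/* Illustrative devirtualization (hypothetical C++ source): when type
   inheritance analysis proves there is exactly one possible target of
     obj->f ();   // emitted as a call through an OBJ_TYPE_REF
   the indirect call is folded to a direct call to that method, and a
   virtual call with no possible targets at all is replaced by a call
   to __builtin_unreachable. */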
2604 static bool
2605 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
2607 gimple stmt = gsi_stmt (*gsi);
2608 tree callee;
2609 bool changed = false;
2610 unsigned i;
2612 /* Fold *& in call arguments. */
2613 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2614 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2616 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2617 if (tmp)
2619 gimple_call_set_arg (stmt, i, tmp);
2620 changed = true;
2624 /* Check for virtual calls that became direct calls. */
2625 callee = gimple_call_fn (stmt);
2626 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
2628 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
2630 if (dump_file && virtual_method_call_p (callee)
2631 && !possible_polymorphic_call_target_p
2632 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
2633 (OBJ_TYPE_REF_EXPR (callee)))))
2635 fprintf (dump_file,
2636 "Type inheritance inconsistent devirtualization of ");
2637 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2638 fprintf (dump_file, " to ");
2639 print_generic_expr (dump_file, callee, TDF_SLIM);
2640 fprintf (dump_file, "\n");
2643 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
2644 changed = true;
2646 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
2648 bool final;
2649 vec <cgraph_node *>targets
2650 = possible_polymorphic_call_targets (callee, stmt, &final);
2651 if (final && targets.length () <= 1 && dbg_cnt (devirt))
2653 tree lhs = gimple_call_lhs (stmt);
2654 if (dump_enabled_p ())
2656 location_t loc = gimple_location_safe (stmt);
2657 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2658 "folding virtual function call to %s\n",
2659 targets.length () == 1
2660 ? targets[0]->name ()
2661 : "__builtin_unreachable");
2663 if (targets.length () == 1)
2665 gimple_call_set_fndecl (stmt, targets[0]->decl);
2666 changed = true;
2667 /* If the call becomes noreturn, remove the lhs. */
2668 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
2670 if (TREE_CODE (lhs) == SSA_NAME)
2672 tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
2673 tree def = get_or_create_ssa_default_def (cfun, var);
2674 gimple new_stmt = gimple_build_assign (lhs, def);
2675 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
2677 gimple_call_set_lhs (stmt, NULL_TREE);
2680 else
2682 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2683 gimple new_stmt = gimple_build_call (fndecl, 0);
2684 gimple_set_location (new_stmt, gimple_location (stmt));
2685 if (lhs && TREE_CODE (lhs) == SSA_NAME)
2687 tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
2688 tree def = get_or_create_ssa_default_def (cfun, var);
2690 /* To satisfy condition for
2691 cgraph_update_edges_for_call_stmt_node,
2692 we need to preserve GIMPLE_CALL statement
2693 at position of GSI iterator. */
2694 update_call_from_tree (gsi, def);
2695 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
2697 else
2699 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2700 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
2701 gsi_replace (gsi, new_stmt, false);
2703 return true;
2709 if (inplace)
2710 return changed;
2712 /* Check for builtins that CCP can handle using information not
2713 available in the generic fold routines. */
2714 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2716 if (gimple_fold_builtin (gsi))
2717 changed = true;
2719 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
2721 changed |= targetm.gimple_fold_builtin (gsi);
2723 else if (gimple_call_internal_p (stmt))
2725 enum tree_code subcode = ERROR_MARK;
2726 tree result = NULL_TREE;
2727 switch (gimple_call_internal_fn (stmt))
2729 case IFN_BUILTIN_EXPECT:
2730 result = fold_builtin_expect (gimple_location (stmt),
2731 gimple_call_arg (stmt, 0),
2732 gimple_call_arg (stmt, 1),
2733 gimple_call_arg (stmt, 2));
2734 break;
2735 case IFN_UBSAN_OBJECT_SIZE:
2736 if (integer_all_onesp (gimple_call_arg (stmt, 2))
2737 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
2738 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
2739 && tree_int_cst_le (gimple_call_arg (stmt, 1),
2740 gimple_call_arg (stmt, 2))))
2742 gsi_replace (gsi, gimple_build_nop (), true);
2743 unlink_stmt_vdef (stmt);
2744 release_defs (stmt);
2745 return true;
2747 break;
2748 case IFN_UBSAN_CHECK_ADD:
2749 subcode = PLUS_EXPR;
2750 break;
2751 case IFN_UBSAN_CHECK_SUB:
2752 subcode = MINUS_EXPR;
2753 break;
2754 case IFN_UBSAN_CHECK_MUL:
2755 subcode = MULT_EXPR;
2756 break;
2757 default:
2758 break;
2760 if (subcode != ERROR_MARK)
2762 tree arg0 = gimple_call_arg (stmt, 0);
2763 tree arg1 = gimple_call_arg (stmt, 1);
2764 /* x = y + 0; x = y - 0; x = y * 0; */
2765 if (integer_zerop (arg1))
2766 result = subcode == MULT_EXPR
2767 ? build_zero_cst (TREE_TYPE (arg0))
2768 : arg0;
2769 /* x = 0 + y; x = 0 * y; */
2770 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
2771 result = subcode == MULT_EXPR
2772 ? build_zero_cst (TREE_TYPE (arg0))
2773 : arg1;
2774 /* x = y - y; */
2775 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
2776 result = build_zero_cst (TREE_TYPE (arg0));
2777 /* x = y * 1; x = 1 * y; */
2778 else if (subcode == MULT_EXPR)
2780 if (integer_onep (arg1))
2781 result = arg0;
2782 else if (integer_onep (arg0))
2783 result = arg1;
2786 if (result)
2788 if (!update_call_from_tree (gsi, result))
2789 gimplify_and_update_call_from_tree (gsi, result);
2790 changed = true;
2794 return changed;
2798 /* Worker for fold_stmt_1's dispatch to pattern-based folding with
2799 gimple_simplify.
2801 Replaces *GSI with the simplification result in RCODE and OPS
2802 and the associated statements in *SEQ. Does the replacement
2803 according to INPLACE and returns true if the operation succeeded. */
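/* Illustrative example (hypothetical): if gimple_simplify reduces the
   condition of
     if (a_1 != a_1) ...
   to the integer constant 0 (RCODE == INTEGER_CST), the GIMPLE_COND is
   turned into a statically false condition via gimple_cond_make_false. */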
2805 static bool
2806 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
2807 code_helper rcode, tree *ops,
2808 gimple_seq *seq, bool inplace)
2810 gimple stmt = gsi_stmt (*gsi);
2812 /* Play safe and do not allow abnormals to be mentioned in
2813 newly created statements. See also maybe_push_res_to_seq. */
2814 if ((TREE_CODE (ops[0]) == SSA_NAME
2815 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
2816 || (ops[1]
2817 && TREE_CODE (ops[1]) == SSA_NAME
2818 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
2819 || (ops[2]
2820 && TREE_CODE (ops[2]) == SSA_NAME
2821 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
2822 return false;
2824 if (gimple_code (stmt) == GIMPLE_COND)
2826 gcc_assert (rcode.is_tree_code ());
2827 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
2828 /* A GIMPLE_COND's condition may not throw. */
2829 && (!flag_exceptions
2830 || !cfun->can_throw_non_call_exceptions
2831 || !operation_could_trap_p (rcode,
2832 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
2833 false, NULL_TREE)))
2834 gimple_cond_set_condition (stmt, rcode, ops[0], ops[1]);
2835 else if (rcode == SSA_NAME)
2836 gimple_cond_set_condition (stmt, NE_EXPR, ops[0],
2837 build_zero_cst (TREE_TYPE (ops[0])));
2838 else if (rcode == INTEGER_CST)
2840 if (integer_zerop (ops[0]))
2841 gimple_cond_make_false (stmt);
2842 else
2843 gimple_cond_make_true (stmt);
2845 else if (!inplace)
2847 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
2848 ops, seq);
2849 if (!res)
2850 return false;
2851 gimple_cond_set_condition (stmt, NE_EXPR, res,
2852 build_zero_cst (TREE_TYPE (res)));
2854 else
2855 return false;
2856 if (dump_file && (dump_flags & TDF_DETAILS))
2858 fprintf (dump_file, "gimple_simplified to ");
2859 if (!gimple_seq_empty_p (*seq))
2860 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
2861 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
2862 0, TDF_SLIM);
2864 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
2865 return true;
2867 else if (is_gimple_assign (stmt)
2868 && rcode.is_tree_code ())
2870 if (!inplace
2871 || gimple_num_ops (stmt) <= get_gimple_rhs_num_ops (rcode))
2873 maybe_build_generic_op (rcode,
2874 TREE_TYPE (gimple_assign_lhs (stmt)),
2875 &ops[0], ops[1], ops[2]);
2876 gimple_assign_set_rhs_with_ops_1 (gsi, rcode,
2877 ops[0], ops[1], ops[2]);
2878 if (dump_file && (dump_flags & TDF_DETAILS))
2880 fprintf (dump_file, "gimple_simplified to ");
2881 if (!gimple_seq_empty_p (*seq))
2882 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
2883 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
2884 0, TDF_SLIM);
2886 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
2887 return true;
2890 else if (!inplace)
2892 if (gimple_has_lhs (stmt))
2894 tree lhs = gimple_get_lhs (stmt);
2895 maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
2896 ops, seq, lhs);
2897 if (dump_file && (dump_flags & TDF_DETAILS))
2899 fprintf (dump_file, "gimple_simplified to ");
2900 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
2902 gsi_replace_with_seq_vops (gsi, *seq);
2903 return true;
2905 else
2906 gcc_unreachable ();
2909 return false;
2912 /* Canonicalize a MEM_REF's invariant address operand after propagation. */
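/* Illustrative example (hypothetical): after propagating &a.b into an
   address, MEM[&a.b, 4] is rewritten as MEM[&a, <offset of b> + 4], and
   MEM[&x, 0] is canonicalized back to the plain decl X when volatility,
   TBAA type, and alignment all agree. */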
2914 static bool
2915 maybe_canonicalize_mem_ref_addr (tree *t)
2917 bool res = false;
2919 if (TREE_CODE (*t) == ADDR_EXPR)
2920 t = &TREE_OPERAND (*t, 0);
2922 while (handled_component_p (*t))
2923 t = &TREE_OPERAND (*t, 0);
2925 /* Canonicalize MEM [&foo.bar, 0], which appears after propagation
2926 of invariant addresses into an SSA-name MEM_REF address. */
2927 if (TREE_CODE (*t) == MEM_REF
2928 || TREE_CODE (*t) == TARGET_MEM_REF)
2930 tree addr = TREE_OPERAND (*t, 0);
2931 if (TREE_CODE (addr) == ADDR_EXPR
2932 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
2933 || handled_component_p (TREE_OPERAND (addr, 0))))
2935 tree base;
2936 HOST_WIDE_INT coffset;
2937 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
2938 &coffset);
2939 if (!base)
2940 gcc_unreachable ();
2942 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
2943 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
2944 TREE_OPERAND (*t, 1),
2945 size_int (coffset));
2946 res = true;
2948 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
2949 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
2952 /* Canonicalize back MEM_REFs to plain reference trees if the object
2953 accessed is a decl that has the same access semantics as the MEM_REF. */
2954 if (TREE_CODE (*t) == MEM_REF
2955 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
2956 && integer_zerop (TREE_OPERAND (*t, 1)))
2958 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
2959 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
2960 if (/* Same volatile qualification. */
2961 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
2962 /* Same TBAA behavior with -fstrict-aliasing. */
2963 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
2964 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
2965 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
2966 /* Same alignment. */
2967 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
2968 /* We have to look out here to not drop a required conversion
2969 from the rhs to the lhs if *t appears on the lhs or vice-versa
2970 if it appears on the rhs. Thus require strict type
2971 compatibility. */
2972 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
2974 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
2975 res = true;
2979 /* Canonicalize TARGET_MEM_REF in particular with respect to
2980 the indexes becoming constant. */
2981 else if (TREE_CODE (*t) == TARGET_MEM_REF)
2983 tree tem = maybe_fold_tmr (*t);
2984 if (tem)
2986 *t = tem;
2987 res = true;
2991 return res;
2994 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
2995 distinguishes both cases. */
2997 static bool
2998 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3000 bool changed = false;
3001 gimple stmt = gsi_stmt (*gsi);
3002 unsigned i;
3004 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3005 after propagation.
3006 ??? This shouldn't be done in generic folding but in the
3007 propagation helpers which also know whether an address was
3008 propagated. */
3009 switch (gimple_code (stmt))
3011 case GIMPLE_ASSIGN:
3012 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3014 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3015 if ((REFERENCE_CLASS_P (*rhs)
3016 || TREE_CODE (*rhs) == ADDR_EXPR)
3017 && maybe_canonicalize_mem_ref_addr (rhs))
3018 changed = true;
3019 tree *lhs = gimple_assign_lhs_ptr (stmt);
3020 if (REFERENCE_CLASS_P (*lhs)
3021 && maybe_canonicalize_mem_ref_addr (lhs))
3022 changed = true;
3024 break;
3025 case GIMPLE_CALL:
3027 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3029 tree *arg = gimple_call_arg_ptr (stmt, i);
3030 if (REFERENCE_CLASS_P (*arg)
3031 && maybe_canonicalize_mem_ref_addr (arg))
3032 changed = true;
3034 tree *lhs = gimple_call_lhs_ptr (stmt);
3035 if (*lhs
3036 && REFERENCE_CLASS_P (*lhs)
3037 && maybe_canonicalize_mem_ref_addr (lhs))
3038 changed = true;
3039 break;
3041 case GIMPLE_ASM:
3043 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
3045 tree link = gimple_asm_output_op (stmt, i);
3046 tree op = TREE_VALUE (link);
3047 if (REFERENCE_CLASS_P (op)
3048 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3049 changed = true;
3051 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
3053 tree link = gimple_asm_input_op (stmt, i);
3054 tree op = TREE_VALUE (link);
3055 if ((REFERENCE_CLASS_P (op)
3056 || TREE_CODE (op) == ADDR_EXPR)
3057 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3058 changed = true;
3061 break;
3062 case GIMPLE_DEBUG:
3063 if (gimple_debug_bind_p (stmt))
3065 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3066 if (*val
3067 && (REFERENCE_CLASS_P (*val)
3068 || TREE_CODE (*val) == ADDR_EXPR)
3069 && maybe_canonicalize_mem_ref_addr (val))
3070 changed = true;
3072 break;
3073 default:;
3076 /* Dispatch to pattern-based folding. */
3077 if (!inplace
3078 || is_gimple_assign (stmt)
3079 || gimple_code (stmt) == GIMPLE_COND)
3081 gimple_seq seq = NULL;
3082 code_helper rcode;
3083 tree ops[3] = {};
3084 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq, valueize))
3086 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3087 changed = true;
3088 else
3089 gimple_seq_discard (seq);
3093 stmt = gsi_stmt (*gsi);
3095 /* Fold the main computation performed by the statement. */
3096 switch (gimple_code (stmt))
3098 case GIMPLE_ASSIGN:
3100 unsigned old_num_ops = gimple_num_ops (stmt);
3101 enum tree_code subcode = gimple_assign_rhs_code (stmt);
3102 tree lhs = gimple_assign_lhs (stmt);
3103 tree new_rhs;
3104 /* First canonicalize operand order. This avoids building new
3105 trees if this is the only thing fold would later do. */
3106 if ((commutative_tree_code (subcode)
3107 || commutative_ternary_tree_code (subcode))
3108 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
3109 gimple_assign_rhs2 (stmt), false))
3111 tree tem = gimple_assign_rhs1 (stmt);
3112 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
3113 gimple_assign_set_rhs2 (stmt, tem);
3114 changed = true;
3116 new_rhs = fold_gimple_assign (gsi);
3117 if (new_rhs
3118 && !useless_type_conversion_p (TREE_TYPE (lhs),
3119 TREE_TYPE (new_rhs)))
3120 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3121 if (new_rhs
3122 && (!inplace
3123 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3125 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3126 changed = true;
3128 break;
3131 case GIMPLE_COND:
3132 changed |= fold_gimple_cond (stmt);
3133 break;
3135 case GIMPLE_CALL:
3136 changed |= gimple_fold_call (gsi, inplace);
3137 break;
3139 case GIMPLE_ASM:
3140 /* Fold *& in asm operands. */
3142 size_t noutputs;
3143 const char **oconstraints;
3144 const char *constraint;
3145 bool allows_mem, allows_reg;
3147 noutputs = gimple_asm_noutputs (stmt);
3148 oconstraints = XALLOCAVEC (const char *, noutputs);
3150 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
3152 tree link = gimple_asm_output_op (stmt, i);
3153 tree op = TREE_VALUE (link);
3154 oconstraints[i]
3155 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3156 if (REFERENCE_CLASS_P (op)
3157 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3159 TREE_VALUE (link) = op;
3160 changed = true;
3163 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
3165 tree link = gimple_asm_input_op (stmt, i);
3166 tree op = TREE_VALUE (link);
3167 constraint
3168 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3169 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3170 oconstraints, &allows_mem, &allows_reg);
3171 if (REFERENCE_CLASS_P (op)
3172 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3173 != NULL_TREE)
3175 TREE_VALUE (link) = op;
3176 changed = true;
3180 break;
3182 case GIMPLE_DEBUG:
3183 if (gimple_debug_bind_p (stmt))
3185 tree val = gimple_debug_bind_get_value (stmt);
3186 if (val
3187 && REFERENCE_CLASS_P (val))
3189 tree tem = maybe_fold_reference (val, false);
3190 if (tem)
3192 gimple_debug_bind_set_value (stmt, tem);
3193 changed = true;
3196 else if (val
3197 && TREE_CODE (val) == ADDR_EXPR)
3199 tree ref = TREE_OPERAND (val, 0);
3200 tree tem = maybe_fold_reference (ref, false);
3201 if (tem)
3203 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3204 gimple_debug_bind_set_value (stmt, tem);
3205 changed = true;
3209 break;
3211 default:;
3214 stmt = gsi_stmt (*gsi);
3216 /* Fold *& on the lhs. */
3217 if (gimple_has_lhs (stmt))
3219 tree lhs = gimple_get_lhs (stmt);
3220 if (lhs && REFERENCE_CLASS_P (lhs))
3222 tree new_lhs = maybe_fold_reference (lhs, true);
3223 if (new_lhs)
3225 gimple_set_lhs (stmt, new_lhs);
3226 changed = true;
3231 return changed;
3234 /* Valueization callback that ends up not following SSA edges. */
3236 tree
3237 no_follow_ssa_edges (tree)
3239 return NULL_TREE;
3242 /* Fold the statement pointed to by GSI. In some cases, this function may
3243 replace the whole statement with a new one. Returns true iff folding
3244 makes any changes.
3245 The statement pointed to by GSI should be in valid gimple form but may
3246 be in an unfolded state resulting from, for example, constant
3247 propagation, which can produce *&x = 0. */
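/* Illustrative example (hypothetical): constant propagation may turn
     p_1 = &x;  *p_1 = 0;
   into the unfolded store
     *&x = 0;
   which fold_stmt canonicalizes to the direct assignment
     x = 0;  */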
3249 bool
3250 fold_stmt (gimple_stmt_iterator *gsi)
3252 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
3255 bool
3256 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
3258 return fold_stmt_1 (gsi, false, valueize);
3261 /* Perform the minimal folding on statement *GSI. Only operations like
3262 *&x created by constant propagation are handled. The statement cannot
3263 be replaced with a new one. Return true if the statement was
3264 changed, false otherwise.
3265 The statement *GSI should be in valid gimple form but may
3266 be in an unfolded state resulting from, for example, constant
3267 propagation, which can produce *&x = 0. */
3269 bool
3270 fold_stmt_inplace (gimple_stmt_iterator *gsi)
3272 gimple stmt = gsi_stmt (*gsi);
3273 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
3274 gcc_assert (gsi_stmt (*gsi) == stmt);
3275 return changed;
3278 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3279 if EXPR is null or we don't know how.
3280 If non-null, the result always has boolean type. */
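/* Illustrative examples (hypothetical operands):
     canonicalize_bool (a < b, false)   yields   a < b
     canonicalize_bool (a < b, true)    yields   a >= b
     canonicalize_bool (x_1, true)      yields   x_1 == 0
   each with boolean type. */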
3282 static tree
3283 canonicalize_bool (tree expr, bool invert)
3285 if (!expr)
3286 return NULL_TREE;
3287 else if (invert)
3289 if (integer_nonzerop (expr))
3290 return boolean_false_node;
3291 else if (integer_zerop (expr))
3292 return boolean_true_node;
3293 else if (TREE_CODE (expr) == SSA_NAME)
3294 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3295 build_int_cst (TREE_TYPE (expr), 0));
3296 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3297 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3298 boolean_type_node,
3299 TREE_OPERAND (expr, 0),
3300 TREE_OPERAND (expr, 1));
3301 else
3302 return NULL_TREE;
3304 else
3306 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3307 return expr;
3308 if (integer_nonzerop (expr))
3309 return boolean_true_node;
3310 else if (integer_zerop (expr))
3311 return boolean_false_node;
3312 else if (TREE_CODE (expr) == SSA_NAME)
3313 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3314 build_int_cst (TREE_TYPE (expr), 0));
3315 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3316 return fold_build2 (TREE_CODE (expr),
3317 boolean_type_node,
3318 TREE_OPERAND (expr, 0),
3319 TREE_OPERAND (expr, 1));
3320 else
3321 return NULL_TREE;
3325 /* Check to see if a boolean expression EXPR is logically equivalent to the
3326 comparison (OP1 CODE OP2). Check for various identities involving
3327 SSA_NAMEs. */
3329 static bool
3330 same_bool_comparison_p (const_tree expr, enum tree_code code,
3331 const_tree op1, const_tree op2)
3333 gimple s;
3335 /* The obvious case. */
3336 if (TREE_CODE (expr) == code
3337 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
3338 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
3339 return true;
3341 /* Check for comparing (name, name != 0) and the case where expr
3342 is an SSA_NAME with a definition matching the comparison. */
3343 if (TREE_CODE (expr) == SSA_NAME
3344 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3346 if (operand_equal_p (expr, op1, 0))
3347 return ((code == NE_EXPR && integer_zerop (op2))
3348 || (code == EQ_EXPR && integer_nonzerop (op2)));
3349 s = SSA_NAME_DEF_STMT (expr);
3350 if (is_gimple_assign (s)
3351 && gimple_assign_rhs_code (s) == code
3352 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
3353 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
3354 return true;
3357 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3358 of name is a comparison, recurse. */
3359 if (TREE_CODE (op1) == SSA_NAME
3360 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
3362 s = SSA_NAME_DEF_STMT (op1);
3363 if (is_gimple_assign (s)
3364 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
3366 enum tree_code c = gimple_assign_rhs_code (s);
3367 if ((c == NE_EXPR && integer_zerop (op2))
3368 || (c == EQ_EXPR && integer_nonzerop (op2)))
3369 return same_bool_comparison_p (expr, c,
3370 gimple_assign_rhs1 (s),
3371 gimple_assign_rhs2 (s));
3372 if ((c == EQ_EXPR && integer_zerop (op2))
3373 || (c == NE_EXPR && integer_nonzerop (op2)))
3374 return same_bool_comparison_p (expr,
3375 invert_tree_comparison (c, false),
3376 gimple_assign_rhs1 (s),
3377 gimple_assign_rhs2 (s));
3380 return false;
3383 /* Check to see if two boolean expressions OP1 and OP2 are logically
3384 equivalent. */
3386 static bool
3387 same_bool_result_p (const_tree op1, const_tree op2)
3389 /* Simple cases first. */
3390 if (operand_equal_p (op1, op2, 0))
3391 return true;
3393 /* Check the cases where at least one of the operands is a comparison.
3394 These are a bit smarter than operand_equal_p in that they apply some
3395 identities on SSA_NAMEs. */
3396 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
3397 && same_bool_comparison_p (op1, TREE_CODE (op2),
3398 TREE_OPERAND (op2, 0),
3399 TREE_OPERAND (op2, 1)))
3400 return true;
3401 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
3402 && same_bool_comparison_p (op2, TREE_CODE (op1),
3403 TREE_OPERAND (op1, 0),
3404 TREE_OPERAND (op1, 1)))
3405 return true;
3407 /* Default case. */
3408 return false;
3411 /* Forward declarations for some mutually recursive functions. */
3413 static tree
3414 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3415 enum tree_code code2, tree op2a, tree op2b);
3416 static tree
3417 and_var_with_comparison (tree var, bool invert,
3418 enum tree_code code2, tree op2a, tree op2b);
3419 static tree
3420 and_var_with_comparison_1 (gimple stmt,
3421 enum tree_code code2, tree op2a, tree op2b);
3422 static tree
3423 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3424 enum tree_code code2, tree op2a, tree op2b);
3425 static tree
3426 or_var_with_comparison (tree var, bool invert,
3427 enum tree_code code2, tree op2a, tree op2b);
3428 static tree
3429 or_var_with_comparison_1 (gimple stmt,
3430 enum tree_code code2, tree op2a, tree op2b);
3432 /* Helper function for and_comparisons_1: try to simplify the AND of the
3433 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3434 If INVERT is true, invert the value of the VAR before doing the AND.
3435 Return NULL_TREE if we can't simplify this to a single expression. */
3437 static tree
3438 and_var_with_comparison (tree var, bool invert,
3439 enum tree_code code2, tree op2a, tree op2b)
3441 tree t;
3442 gimple stmt = SSA_NAME_DEF_STMT (var);
3444 /* We can only deal with variables whose definitions are assignments. */
3445 if (!is_gimple_assign (stmt))
3446 return NULL_TREE;
3448 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3449 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3450 Then we only have to consider the simpler non-inverted cases. */
3451 if (invert)
3452 t = or_var_with_comparison_1 (stmt,
3453 invert_tree_comparison (code2, false),
3454 op2a, op2b);
3455 else
3456 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
3457 return canonicalize_bool (t, invert);
3460 /* Try to simplify the AND of the ssa variable defined by the assignment
3461 STMT with the comparison specified by (OP2A CODE2 OP2B).
3462 Return NULL_TREE if we can't simplify this to a single expression. */
3464 static tree
3465 and_var_with_comparison_1 (gimple stmt,
3466 enum tree_code code2, tree op2a, tree op2b)
3468 tree var = gimple_assign_lhs (stmt);
3469 tree true_test_var = NULL_TREE;
3470 tree false_test_var = NULL_TREE;
3471 enum tree_code innercode = gimple_assign_rhs_code (stmt);
3473 /* Check for identities like (var AND (var == 0)) => false. */
3474 if (TREE_CODE (op2a) == SSA_NAME
3475 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
3477 if ((code2 == NE_EXPR && integer_zerop (op2b))
3478 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
3480 true_test_var = op2a;
3481 if (var == true_test_var)
3482 return var;
3484 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
3485 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
3487 false_test_var = op2a;
3488 if (var == false_test_var)
3489 return boolean_false_node;
3493 /* If the definition is a comparison, recurse on it. */
3494 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
3496 tree t = and_comparisons_1 (innercode,
3497 gimple_assign_rhs1 (stmt),
3498 gimple_assign_rhs2 (stmt),
3499 code2,
3500 op2a,
3501 op2b);
3502 if (t)
3503 return t;
3506 /* If the definition is an AND or OR expression, we may be able to
3507 simplify by reassociating. */
3508 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
3509 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
3511 tree inner1 = gimple_assign_rhs1 (stmt);
3512 tree inner2 = gimple_assign_rhs2 (stmt);
3513 gimple s;
3514 tree t;
3515 tree partial = NULL_TREE;
3516 bool is_and = (innercode == BIT_AND_EXPR);
3518 /* Check for boolean identities that don't require recursive examination
3519 of inner1/inner2:
3520 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
3521 inner1 AND (inner1 OR inner2) => inner1
3522 !inner1 AND (inner1 AND inner2) => false
3523 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
3524 Likewise for similar cases involving inner2. */
3525 if (inner1 == true_test_var)
3526 return (is_and ? var : inner1);
3527 else if (inner2 == true_test_var)
3528 return (is_and ? var : inner2);
3529 else if (inner1 == false_test_var)
3530 return (is_and
3531 ? boolean_false_node
3532 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
3533 else if (inner2 == false_test_var)
3534 return (is_and
3535 ? boolean_false_node
3536 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
3538 /* Next, redistribute/reassociate the AND across the inner tests.
3539 Compute the first partial result, (inner1 AND (op2a code2 op2b)). */
3540 if (TREE_CODE (inner1) == SSA_NAME
3541 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
3542 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
3543 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
3544 gimple_assign_rhs1 (s),
3545 gimple_assign_rhs2 (s),
3546 code2, op2a, op2b)))
3548 /* Handle the AND case, where we are reassociating:
3549 (inner1 AND inner2) AND (op2a code2 op2b)
3550 => (t AND inner2)
3551 If the partial result t is a constant, we win. Otherwise
3552 continue on to try reassociating with the other inner test. */
3553 if (is_and)
3555 if (integer_onep (t))
3556 return inner2;
3557 else if (integer_zerop (t))
3558 return boolean_false_node;
3561 /* Handle the OR case, where we are redistributing:
3562 (inner1 OR inner2) AND (op2a code2 op2b)
3563 => (t OR (inner2 AND (op2a code2 op2b))) */
3564 else if (integer_onep (t))
3565 return boolean_true_node;
3567 /* Save partial result for later. */
3568 partial = t;
3571 /* Compute the second partial result, (inner2 AND (op2a code2 op2b)). */
3572 if (TREE_CODE (inner2) == SSA_NAME
3573 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
3574 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
3575 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
3576 gimple_assign_rhs1 (s),
3577 gimple_assign_rhs2 (s),
3578 code2, op2a, op2b)))
3580 /* Handle the AND case, where we are reassociating:
3581 (inner1 AND inner2) AND (op2a code2 op2b)
3582 => (inner1 AND t) */
3583 if (is_and)
3585 if (integer_onep (t))
3586 return inner1;
3587 else if (integer_zerop (t))
3588 return boolean_false_node;
3589 /* If both are the same, we can apply the identity
3590 (x AND x) == x. */
3591 else if (partial && same_bool_result_p (t, partial))
3592 return t;
3595 /* Handle the OR case, where we are redistributing:
3596 (inner1 OR inner2) AND (op2a code2 op2b)
3597 => (t OR (inner1 AND (op2a code2 op2b)))
3598 => (t OR partial) */
3599 else
3601 if (integer_onep (t))
3602 return boolean_true_node;
3603 else if (partial)
3605 /* We already got a simplification for the other
3606 operand to the redistributed OR expression. The
3607 interesting case is when at least one is false.
3608 Or, if both are the same, we can apply the identity
3609 (x OR x) == x. */
3610 if (integer_zerop (partial))
3611 return t;
3612 else if (integer_zerop (t))
3613 return partial;
3614 else if (same_bool_result_p (t, partial))
3615 return t;
3620 return NULL_TREE;
3623 /* Try to simplify the AND of two comparisons defined by
3624 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
3625 If this can be done without constructing an intermediate value,
3626 return the resulting tree; otherwise NULL_TREE is returned.
3627 This function is deliberately asymmetric as it recurses on SSA_DEFs
3628 in the first comparison but not the second. */
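/* Illustrative simplifications performed below (hypothetical operands):
     (x > 5) AND (x > 3)    yields   x > 5    (more restrictive bound)
     (x == 4) AND (x < 2)   yields   false    (constant fails the other test)
     (x <= 7) AND (x >= 7)  yields   x == 7   (singleton range)
     (x < 2) AND (x > 7)    yields   false    (disjoint ranges)  */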
3630 static tree
3631 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3632 enum tree_code code2, tree op2a, tree op2b)
3634 tree truth_type = truth_type_for (TREE_TYPE (op1a));
3636 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
3637 if (operand_equal_p (op1a, op2a, 0)
3638 && operand_equal_p (op1b, op2b, 0))
3640 /* Result will be either NULL_TREE, or a combined comparison. */
3641 tree t = combine_comparisons (UNKNOWN_LOCATION,
3642 TRUTH_ANDIF_EXPR, code1, code2,
3643 truth_type, op1a, op1b);
3644 if (t)
3645 return t;
3648 /* Likewise the swapped case of the above. */
3649 if (operand_equal_p (op1a, op2b, 0)
3650 && operand_equal_p (op1b, op2a, 0))
3652 /* Result will be either NULL_TREE, or a combined comparison. */
3653 tree t = combine_comparisons (UNKNOWN_LOCATION,
3654 TRUTH_ANDIF_EXPR, code1,
3655 swap_tree_comparison (code2),
3656 truth_type, op1a, op1b);
3657 if (t)
3658 return t;
3661 /* If both comparisons are of the same value against constants, we might
3662 be able to merge them. */
3663 if (operand_equal_p (op1a, op2a, 0)
3664 && TREE_CODE (op1b) == INTEGER_CST
3665 && TREE_CODE (op2b) == INTEGER_CST)
3667 int cmp = tree_int_cst_compare (op1b, op2b);
3669 /* If we have (op1a == op1b), we should either be able to
3670 return that or FALSE, depending on whether the constant op1b
3671 also satisfies the other comparison against op2b. */
3672 if (code1 == EQ_EXPR)
3674 bool done = true;
3675 bool val;
3676 switch (code2)
3678 case EQ_EXPR: val = (cmp == 0); break;
3679 case NE_EXPR: val = (cmp != 0); break;
3680 case LT_EXPR: val = (cmp < 0); break;
3681 case GT_EXPR: val = (cmp > 0); break;
3682 case LE_EXPR: val = (cmp <= 0); break;
3683 case GE_EXPR: val = (cmp >= 0); break;
3684 default: done = false;
3686 if (done)
3688 if (val)
3689 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3690 else
3691 return boolean_false_node;
3694 /* Likewise if the second comparison is an == comparison. */
3695 else if (code2 == EQ_EXPR)
3697 bool done = true;
3698 bool val;
3699 switch (code1)
3701 case EQ_EXPR: val = (cmp == 0); break;
3702 case NE_EXPR: val = (cmp != 0); break;
3703 case LT_EXPR: val = (cmp > 0); break;
3704 case GT_EXPR: val = (cmp < 0); break;
3705 case LE_EXPR: val = (cmp >= 0); break;
3706 case GE_EXPR: val = (cmp <= 0); break;
3707 default: done = false;
3709 if (done)
3711 if (val)
3712 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3713 else
3714 return boolean_false_node;
3718 /* Same business with inequality tests. */
3719 else if (code1 == NE_EXPR)
3721 bool val;
3722 switch (code2)
3724 case EQ_EXPR: val = (cmp != 0); break;
3725 case NE_EXPR: val = (cmp == 0); break;
3726 case LT_EXPR: val = (cmp >= 0); break;
3727 case GT_EXPR: val = (cmp <= 0); break;
3728 case LE_EXPR: val = (cmp > 0); break;
3729 case GE_EXPR: val = (cmp < 0); break;
3730 default:
3731 val = false;
3733 if (val)
3734 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3736 else if (code2 == NE_EXPR)
3738 bool val;
3739 switch (code1)
3741 case EQ_EXPR: val = (cmp == 0); break;
3742 case NE_EXPR: val = (cmp != 0); break;
3743 case LT_EXPR: val = (cmp <= 0); break;
3744 case GT_EXPR: val = (cmp >= 0); break;
3745 case LE_EXPR: val = (cmp < 0); break;
3746 case GE_EXPR: val = (cmp > 0); break;
3747 default:
3748 val = false;
3750 if (val)
3751 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3754 /* Choose the more restrictive of two < or <= comparisons. */
3755 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
3756 && (code2 == LT_EXPR || code2 == LE_EXPR))
3758 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
3759 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3760 else
3761 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3764 /* Likewise choose the more restrictive of two > or >= comparisons. */
3765 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
3766 && (code2 == GT_EXPR || code2 == GE_EXPR))
3768 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
3769 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3770 else
3771 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3774 /* Check for singleton ranges. */
3775 else if (cmp == 0
3776 && ((code1 == LE_EXPR && code2 == GE_EXPR)
3777 || (code1 == GE_EXPR && code2 == LE_EXPR)))
3778 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
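/* E.g. (x <= 3) AND (x >= 3) becomes x == 3.  */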
3780 /* Check for disjoint ranges. */
3781 else if (cmp <= 0
3782 && (code1 == LT_EXPR || code1 == LE_EXPR)
3783 && (code2 == GT_EXPR || code2 == GE_EXPR))
3784 return boolean_false_node;
3785 else if (cmp >= 0
3786 && (code1 == GT_EXPR || code1 == GE_EXPR)
3787 && (code2 == LT_EXPR || code2 == LE_EXPR))
3788 return boolean_false_node;
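/* E.g. (x < 3) AND (x > 5) can never hold, and neither can
   (x <= 3) AND (x > 3).  */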
3791 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
3792 NAME's definition is a truth value. See if there are any simplifications
3793 that can be done against NAME's definition. */
3794 if (TREE_CODE (op1a) == SSA_NAME
3795 && (code1 == NE_EXPR || code1 == EQ_EXPR)
3796 && (integer_zerop (op1b) || integer_onep (op1b)))
3798 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
3799 || (code1 == NE_EXPR && integer_onep (op1b)));
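/* invert is true for (NAME == 0) and (NAME != 1), where we have to
   reason about !NAME rather than NAME below.  */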
3800 gimple stmt = SSA_NAME_DEF_STMT (op1a);
3801 switch (gimple_code (stmt))
3803 case GIMPLE_ASSIGN:
3804 /* Try to simplify by copy-propagating the definition. */
3805 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
3807 case GIMPLE_PHI:
3808 /* If every argument to the PHI produces the same result when
3809 ANDed with the second comparison, we win.
3810 Do not do this unless the type is bool since we need a bool
3811 result here anyway. */
3812 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
3814 tree result = NULL_TREE;
3815 unsigned i;
3816 for (i = 0; i < gimple_phi_num_args (stmt); i++)
3818 tree arg = gimple_phi_arg_def (stmt, i);
3820 /* If this PHI has itself as an argument, ignore it.
3821 If all the other args produce the same result,
3822 we're still OK. */
3823 if (arg == gimple_phi_result (stmt))
3824 continue;
3825 else if (TREE_CODE (arg) == INTEGER_CST)
3827 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
3829 if (!result)
3830 result = boolean_false_node;
3831 else if (!integer_zerop (result))
3832 return NULL_TREE;
3834 else if (!result)
3835 result = fold_build2 (code2, boolean_type_node,
3836 op2a, op2b);
3837 else if (!same_bool_comparison_p (result,
3838 code2, op2a, op2b))
3839 return NULL_TREE;
3841 else if (TREE_CODE (arg) == SSA_NAME
3842 && !SSA_NAME_IS_DEFAULT_DEF (arg))
3844 tree temp;
3845 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
3846 /* In simple cases we can look through PHI nodes,
3847 but we have to be careful with loops.
3848 See PR49073. */
3849 if (! dom_info_available_p (CDI_DOMINATORS)
3850 || gimple_bb (def_stmt) == gimple_bb (stmt)
3851 || dominated_by_p (CDI_DOMINATORS,
3852 gimple_bb (def_stmt),
3853 gimple_bb (stmt)))
3854 return NULL_TREE;
3855 temp = and_var_with_comparison (arg, invert, code2,
3856 op2a, op2b);
3857 if (!temp)
3858 return NULL_TREE;
3859 else if (!result)
3860 result = temp;
3861 else if (!same_bool_result_p (result, temp))
3862 return NULL_TREE;
3864 else
3865 return NULL_TREE;
3867 return result;
3870 default:
3871 break;
3874 return NULL_TREE;
3877 /* Try to simplify the AND of two comparisons, specified by
3878 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
3879 If this can be simplified to a single expression (without requiring
3880 introducing more SSA variables to hold intermediate values),
3881 return the resulting tree. Otherwise return NULL_TREE.
3882 If the result expression is non-null, it has boolean type. */
3884 tree
3885 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
3886 enum tree_code code2, tree op2a, tree op2b)
3888 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
3889 if (t)
3890 return t;
3891 else
3892 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
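/* and_comparisons_1 recurses only on SSA defs of its first comparison,
   so the second call above retries with the operands swapped; AND is
   commutative, so any simplification found this way is still valid.  */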
3895 /* Helper function for or_comparisons_1: try to simplify the OR of the
3896 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3897 If INVERT is true, invert the value of VAR before doing the OR.
3898 Return NULL_TREE if we can't simplify this to a single expression. */
3900 static tree
3901 or_var_with_comparison (tree var, bool invert,
3902 enum tree_code code2, tree op2a, tree op2b)
3904 tree t;
3905 gimple stmt = SSA_NAME_DEF_STMT (var);
3907 /* We can only deal with variables whose definitions are assignments. */
3908 if (!is_gimple_assign (stmt))
3909 return NULL_TREE;
3911 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3912 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
3913 Then we only have to consider the simpler non-inverted cases. */
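/* For instance, with INVERT set and code2 == LT_EXPR we compute
   t = var AND (op2a >= op2b) and let canonicalize_bool invert it.  */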
3914 if (invert)
3915 t = and_var_with_comparison_1 (stmt,
3916 invert_tree_comparison (code2, false),
3917 op2a, op2b);
3918 else
3919 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
3920 return canonicalize_bool (t, invert);
3923 /* Try to simplify the OR of the ssa variable defined by the assignment
3924 STMT with the comparison specified by (OP2A CODE2 OP2B).
3925 Return NULL_TREE if we can't simplify this to a single expression. */
3927 static tree
3928 or_var_with_comparison_1 (gimple stmt,
3929 enum tree_code code2, tree op2a, tree op2b)
3931 tree var = gimple_assign_lhs (stmt);
3932 tree true_test_var = NULL_TREE;
3933 tree false_test_var = NULL_TREE;
3934 enum tree_code innercode = gimple_assign_rhs_code (stmt);
3936 /* Check for identities like (var OR (var != 0)) => var and (var OR (var == 0)) => true. */
3937 if (TREE_CODE (op2a) == SSA_NAME
3938 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
3940 if ((code2 == NE_EXPR && integer_zerop (op2b))
3941 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
3943 true_test_var = op2a;
3944 if (var == true_test_var)
3945 return var;
3947 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
3948 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
3950 false_test_var = op2a;
3951 if (var == false_test_var)
3952 return boolean_true_node;
3956 /* If the definition is a comparison, recurse on it. */
3957 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
3959 tree t = or_comparisons_1 (innercode,
3960 gimple_assign_rhs1 (stmt),
3961 gimple_assign_rhs2 (stmt),
3962 code2,
3963 op2a,
3964 op2b);
3965 if (t)
3966 return t;
3969 /* If the definition is an AND or OR expression, we may be able to
3970 simplify by reassociating. */
3971 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
3972 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
3974 tree inner1 = gimple_assign_rhs1 (stmt);
3975 tree inner2 = gimple_assign_rhs2 (stmt);
3976 gimple s;
3977 tree t;
3978 tree partial = NULL_TREE;
3979 bool is_or = (innercode == BIT_IOR_EXPR);
3981 /* Check for boolean identities that don't require recursive examination
3982 of inner1/inner2:
3983 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
3984 inner1 OR (inner1 AND inner2) => inner1
3985 !inner1 OR (inner1 OR inner2) => true
3986 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
3988 if (inner1 == true_test_var)
3989 return (is_or ? var : inner1);
3990 else if (inner2 == true_test_var)
3991 return (is_or ? var : inner2);
3992 else if (inner1 == false_test_var)
3993 return (is_or
3994 ? boolean_true_node
3995 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
3996 else if (inner2 == false_test_var)
3997 return (is_or
3998 ? boolean_true_node
3999 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4001 /* Next, redistribute/reassociate the OR across the inner tests.
4002 Compute the first partial result, (inner1 OR (op2a code2 op2b)) */
4003 if (TREE_CODE (inner1) == SSA_NAME
4004 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4005 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4006 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4007 gimple_assign_rhs1 (s),
4008 gimple_assign_rhs2 (s),
4009 code2, op2a, op2b)))
4011 /* Handle the OR case, where we are reassociating:
4012 (inner1 OR inner2) OR (op2a code2 op2b)
4013 => (t OR inner2)
4014 If the partial result t is a constant, we win. Otherwise
4015 continue on to try reassociating with the other inner test. */
4016 if (is_or)
4018 if (integer_onep (t))
4019 return boolean_true_node;
4020 else if (integer_zerop (t))
4021 return inner2;
4024 /* Handle the AND case, where we are redistributing:
4025 (inner1 AND inner2) OR (op2a code2 op2b)
4026 => (t AND (inner2 OR (op2a code2 op2b))) */
4027 else if (integer_zerop (t))
4028 return boolean_false_node;
4030 /* Save partial result for later. */
4031 partial = t;
4034 /* Compute the second partial result, (inner2 OR (op2a code2 op2b)) */
4035 if (TREE_CODE (inner2) == SSA_NAME
4036 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4037 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4038 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4039 gimple_assign_rhs1 (s),
4040 gimple_assign_rhs2 (s),
4041 code2, op2a, op2b)))
4043 /* Handle the OR case, where we are reassociating:
4044 (inner1 OR inner2) OR (op2a code2 op2b)
4045 => (inner1 OR t)
4046 => (t OR partial) */
4047 if (is_or)
4049 if (integer_zerop (t))
4050 return inner1;
4051 else if (integer_onep (t))
4052 return boolean_true_node;
4053 /* If both are the same, we can apply the identity
4054 (x OR x) == x. */
4055 else if (partial && same_bool_result_p (t, partial))
4056 return t;
4059 /* Handle the AND case, where we are redistributing:
4060 (inner1 AND inner2) OR (op2a code2 op2b)
4061 => (t AND (inner1 OR (op2a code2 op2b)))
4062 => (t AND partial) */
4063 else
4065 if (integer_zerop (t))
4066 return boolean_false_node;
4067 else if (partial)
4069 /* We already got a simplification for the other
4070 operand to the redistributed AND expression. The
4071 interesting case is when at least one is true.
4072 Or, if both are the same, we can apply the identity
4073 (x AND x) == x. */
4074 if (integer_onep (partial))
4075 return t;
4076 else if (integer_onep (t))
4077 return partial;
4078 else if (same_bool_result_p (t, partial))
4079 return t;
4084 return NULL_TREE;
4087 /* Try to simplify the OR of two comparisons defined by
4088 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4089 If this can be done without constructing an intermediate value,
4090 return the resulting tree; otherwise NULL_TREE is returned.
4091 This function is deliberately asymmetric as it recurses on SSA_DEFs
4092 in the first comparison but not the second. */
4094 static tree
4095 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4096 enum tree_code code2, tree op2a, tree op2b)
4098 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4100 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4101 if (operand_equal_p (op1a, op2a, 0)
4102 && operand_equal_p (op1b, op2b, 0))
4104 /* Result will be either NULL_TREE, or a combined comparison. */
4105 tree t = combine_comparisons (UNKNOWN_LOCATION,
4106 TRUTH_ORIF_EXPR, code1, code2,
4107 truth_type, op1a, op1b);
4108 if (t)
4109 return t;
4112 /* Likewise the swapped case of the above. */
4113 if (operand_equal_p (op1a, op2b, 0)
4114 && operand_equal_p (op1b, op2a, 0))
4116 /* Result will be either NULL_TREE, or a combined comparison. */
4117 tree t = combine_comparisons (UNKNOWN_LOCATION,
4118 TRUTH_ORIF_EXPR, code1,
4119 swap_tree_comparison (code2),
4120 truth_type, op1a, op1b);
4121 if (t)
4122 return t;
4125 /* If both comparisons are of the same value against constants, we might
4126 be able to merge them. */
4127 if (operand_equal_p (op1a, op2a, 0)
4128 && TREE_CODE (op1b) == INTEGER_CST
4129 && TREE_CODE (op2b) == INTEGER_CST)
4131 int cmp = tree_int_cst_compare (op1b, op2b);
4133 /* If we have (op1a != op1b), we should either be able to
4134 return that or TRUE, depending on whether the constant op1b
4135 also satisfies the other comparison against op2b. */
4136 if (code1 == NE_EXPR)
4138 bool done = true;
4139 bool val;
4140 switch (code2)
4142 case EQ_EXPR: val = (cmp == 0); break;
4143 case NE_EXPR: val = (cmp != 0); break;
4144 case LT_EXPR: val = (cmp < 0); break;
4145 case GT_EXPR: val = (cmp > 0); break;
4146 case LE_EXPR: val = (cmp <= 0); break;
4147 case GE_EXPR: val = (cmp >= 0); break;
4148 default: done = false;
4150 if (done)
4152 if (val)
4153 return boolean_true_node;
4154 else
4155 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4158 /* Likewise if the second comparison is a != comparison. */
4159 else if (code2 == NE_EXPR)
4161 bool done = true;
4162 bool val;
4163 switch (code1)
4165 case EQ_EXPR: val = (cmp == 0); break;
4166 case NE_EXPR: val = (cmp != 0); break;
4167 case LT_EXPR: val = (cmp > 0); break;
4168 case GT_EXPR: val = (cmp < 0); break;
4169 case LE_EXPR: val = (cmp >= 0); break;
4170 case GE_EXPR: val = (cmp <= 0); break;
4171 default: done = false;
4173 if (done)
4175 if (val)
4176 return boolean_true_node;
4177 else
4178 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4182 /* See if an equality test is redundant with the other comparison. */
4183 else if (code1 == EQ_EXPR)
4185 bool val;
4186 switch (code2)
4188 case EQ_EXPR: val = (cmp == 0); break;
4189 case NE_EXPR: val = (cmp != 0); break;
4190 case LT_EXPR: val = (cmp < 0); break;
4191 case GT_EXPR: val = (cmp > 0); break;
4192 case LE_EXPR: val = (cmp <= 0); break;
4193 case GE_EXPR: val = (cmp >= 0); break;
4194 default:
4195 val = false;
4197 if (val)
4198 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4200 else if (code2 == EQ_EXPR)
4202 bool val;
4203 switch (code1)
4205 case EQ_EXPR: val = (cmp == 0); break;
4206 case NE_EXPR: val = (cmp != 0); break;
4207 case LT_EXPR: val = (cmp > 0); break;
4208 case GT_EXPR: val = (cmp < 0); break;
4209 case LE_EXPR: val = (cmp >= 0); break;
4210 case GE_EXPR: val = (cmp <= 0); break;
4211 default:
4212 val = false;
4214 if (val)
4215 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4218 /* Choose the less restrictive of two < or <= comparisons. */
4219 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4220 && (code2 == LT_EXPR || code2 == LE_EXPR))
4222 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4223 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4224 else
4225 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4228 /* Likewise choose the less restrictive of two > or >= comparisons. */
4229 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4230 && (code2 == GT_EXPR || code2 == GE_EXPR))
4232 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4233 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4234 else
4235 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4238 /* Check for singleton ranges. */
4239 else if (cmp == 0
4240 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4241 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4242 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
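/* E.g. (x < 3) OR (x > 3) becomes x != 3.  */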
4244 /* Check for less/greater pairs that don't restrict the range at all. */
4245 else if (cmp >= 0
4246 && (code1 == LT_EXPR || code1 == LE_EXPR)
4247 && (code2 == GT_EXPR || code2 == GE_EXPR))
4248 return boolean_true_node;
4249 else if (cmp <= 0
4250 && (code1 == GT_EXPR || code1 == GE_EXPR)
4251 && (code2 == LT_EXPR || code2 == LE_EXPR))
4252 return boolean_true_node;
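/* E.g. (x <= 5) OR (x >= 3) covers every value of x.  */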
4255 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4256 NAME's definition is a truth value. See if there are any simplifications
4257 that can be done against NAME's definition. */
4258 if (TREE_CODE (op1a) == SSA_NAME
4259 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4260 && (integer_zerop (op1b) || integer_onep (op1b)))
4262 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4263 || (code1 == NE_EXPR && integer_onep (op1b)));
4264 gimple stmt = SSA_NAME_DEF_STMT (op1a);
4265 switch (gimple_code (stmt))
4267 case GIMPLE_ASSIGN:
4268 /* Try to simplify by copy-propagating the definition. */
4269 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4271 case GIMPLE_PHI:
4272 /* If every argument to the PHI produces the same result when
4273 ORed with the second comparison, we win.
4274 Do not do this unless the type is bool since we need a bool
4275 result here anyway. */
4276 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4278 tree result = NULL_TREE;
4279 unsigned i;
4280 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4282 tree arg = gimple_phi_arg_def (stmt, i);
4284 /* If this PHI has itself as an argument, ignore it.
4285 If all the other args produce the same result,
4286 we're still OK. */
4287 if (arg == gimple_phi_result (stmt))
4288 continue;
4289 else if (TREE_CODE (arg) == INTEGER_CST)
4291 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4293 if (!result)
4294 result = boolean_true_node;
4295 else if (!integer_onep (result))
4296 return NULL_TREE;
4298 else if (!result)
4299 result = fold_build2 (code2, boolean_type_node,
4300 op2a, op2b);
4301 else if (!same_bool_comparison_p (result,
4302 code2, op2a, op2b))
4303 return NULL_TREE;
4305 else if (TREE_CODE (arg) == SSA_NAME
4306 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4308 tree temp;
4309 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
4310 /* In simple cases we can look through PHI nodes,
4311 but we have to be careful with loops.
4312 See PR49073. */
4313 if (! dom_info_available_p (CDI_DOMINATORS)
4314 || gimple_bb (def_stmt) == gimple_bb (stmt)
4315 || dominated_by_p (CDI_DOMINATORS,
4316 gimple_bb (def_stmt),
4317 gimple_bb (stmt)))
4318 return NULL_TREE;
4319 temp = or_var_with_comparison (arg, invert, code2,
4320 op2a, op2b);
4321 if (!temp)
4322 return NULL_TREE;
4323 else if (!result)
4324 result = temp;
4325 else if (!same_bool_result_p (result, temp))
4326 return NULL_TREE;
4328 else
4329 return NULL_TREE;
4331 return result;
4334 default:
4335 break;
4338 return NULL_TREE;
4341 /* Try to simplify the OR of two comparisons, specified by
4342 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4343 If this can be simplified to a single expression (without requiring
4344 introducing more SSA variables to hold intermediate values),
4345 return the resulting tree. Otherwise return NULL_TREE.
4346 If the result expression is non-null, it has boolean type. */
4348 tree
4349 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4350 enum tree_code code2, tree op2a, tree op2b)
4352 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4353 if (t)
4354 return t;
4355 else
4356 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
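/* As with maybe_fold_and_comparisons, the second call retries with the
   operands swapped, since or_comparisons_1 recurses only on SSA defs of
   the first comparison and OR is commutative.  */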
4360 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4362 Either NULL_TREE, a simplified but non-constant tree, or a constant
4363 is returned.
4365 ??? This should go into a gimple-fold-inline.h file to be eventually
4366 privatized with the single valueize function used in the various TUs
4367 to avoid the indirect function call overhead. */
4369 tree
4370 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
4372 location_t loc = gimple_location (stmt);
4373 switch (gimple_code (stmt))
4375 case GIMPLE_ASSIGN:
4377 enum tree_code subcode = gimple_assign_rhs_code (stmt);
4379 switch (get_gimple_rhs_class (subcode))
4381 case GIMPLE_SINGLE_RHS:
4383 tree rhs = gimple_assign_rhs1 (stmt);
4384 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
4386 if (TREE_CODE (rhs) == SSA_NAME)
4388 /* If the RHS is an SSA_NAME, return its known constant value,
4389 if any. */
4390 return (*valueize) (rhs);
4392 /* Handle propagating invariant addresses into address
4393 operations. */
4394 else if (TREE_CODE (rhs) == ADDR_EXPR
4395 && !is_gimple_min_invariant (rhs))
4397 HOST_WIDE_INT offset = 0;
4398 tree base;
4399 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
4400 &offset,
4401 valueize);
4402 if (base
4403 && (CONSTANT_CLASS_P (base)
4404 || decl_address_invariant_p (base)))
4405 return build_invariant_address (TREE_TYPE (rhs),
4406 base, offset);
4408 else if (TREE_CODE (rhs) == CONSTRUCTOR
4409 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
4410 && (CONSTRUCTOR_NELTS (rhs)
4411 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
4413 unsigned i;
4414 tree val, *vec;
4416 vec = XALLOCAVEC (tree,
4417 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
4418 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
4420 val = (*valueize) (val);
4421 if (TREE_CODE (val) == INTEGER_CST
4422 || TREE_CODE (val) == REAL_CST
4423 || TREE_CODE (val) == FIXED_CST)
4424 vec[i] = val;
4425 else
4426 return NULL_TREE;
4429 return build_vector (TREE_TYPE (rhs), vec);
4431 if (subcode == OBJ_TYPE_REF)
4433 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
4434 /* If callee is constant, we can fold away the wrapper. */
4435 if (is_gimple_min_invariant (val))
4436 return val;
4439 if (kind == tcc_reference)
4441 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
4442 || TREE_CODE (rhs) == REALPART_EXPR
4443 || TREE_CODE (rhs) == IMAGPART_EXPR)
4444 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4446 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4447 return fold_unary_loc (EXPR_LOCATION (rhs),
4448 TREE_CODE (rhs),
4449 TREE_TYPE (rhs), val);
4451 else if (TREE_CODE (rhs) == BIT_FIELD_REF
4452 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4454 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4455 return fold_ternary_loc (EXPR_LOCATION (rhs),
4456 TREE_CODE (rhs),
4457 TREE_TYPE (rhs), val,
4458 TREE_OPERAND (rhs, 1),
4459 TREE_OPERAND (rhs, 2));
4461 else if (TREE_CODE (rhs) == MEM_REF
4462 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4464 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4465 if (TREE_CODE (val) == ADDR_EXPR
4466 && is_gimple_min_invariant (val))
4468 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
4469 unshare_expr (val),
4470 TREE_OPERAND (rhs, 1));
4471 if (tem)
4472 rhs = tem;
4475 return fold_const_aggregate_ref_1 (rhs, valueize);
4477 else if (kind == tcc_declaration)
4478 return get_symbol_constant_value (rhs);
4479 return rhs;
4482 case GIMPLE_UNARY_RHS:
4484 /* Handle unary operators that can appear in GIMPLE form.
4485 Note that we know the single operand must be a constant,
4486 so this should almost always return a simplified RHS. */
4487 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
4489 return
4490 fold_unary_ignore_overflow_loc (loc, subcode,
4491 gimple_expr_type (stmt), op0);
4494 case GIMPLE_BINARY_RHS:
4496 /* Handle binary operators that can appear in GIMPLE form. */
4497 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
4498 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4500 /* Translate &x + CST into an invariant form suitable for
4501 further propagation. */
4502 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
4503 && TREE_CODE (op0) == ADDR_EXPR
4504 && TREE_CODE (op1) == INTEGER_CST)
4506 tree off = fold_convert (ptr_type_node, op1);
4507 return build_fold_addr_expr_loc
4508 (loc,
4509 fold_build2 (MEM_REF,
4510 TREE_TYPE (TREE_TYPE (op0)),
4511 unshare_expr (op0), off));
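/* E.g. op0 == &a and op1 == 4 yield &MEM[(T *)&a + 4B], an invariant
   address suitable for further propagation.  */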
4514 return fold_binary_loc (loc, subcode,
4515 gimple_expr_type (stmt), op0, op1);
4518 case GIMPLE_TERNARY_RHS:
4520 /* Handle ternary operators that can appear in GIMPLE form. */
4521 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
4522 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4523 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
4525 /* Fold embedded expressions in ternary codes. */
4526 if ((subcode == COND_EXPR
4527 || subcode == VEC_COND_EXPR)
4528 && COMPARISON_CLASS_P (op0))
4530 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
4531 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
4532 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
4533 TREE_TYPE (op0), op00, op01);
4534 if (tem)
4535 op0 = tem;
4538 return fold_ternary_loc (loc, subcode,
4539 gimple_expr_type (stmt), op0, op1, op2);
4542 default:
4543 gcc_unreachable ();
4547 case GIMPLE_CALL:
4549 tree fn;
4551 if (gimple_call_internal_p (stmt))
4553 enum tree_code subcode = ERROR_MARK;
4554 switch (gimple_call_internal_fn (stmt))
4556 case IFN_UBSAN_CHECK_ADD:
4557 subcode = PLUS_EXPR;
4558 break;
4559 case IFN_UBSAN_CHECK_SUB:
4560 subcode = MINUS_EXPR;
4561 break;
4562 case IFN_UBSAN_CHECK_MUL:
4563 subcode = MULT_EXPR;
4564 break;
4565 default:
4566 return NULL_TREE;
4568 tree arg0 = gimple_call_arg (stmt, 0);
4569 tree arg1 = gimple_call_arg (stmt, 1);
4570 tree op0 = (*valueize) (arg0);
4571 tree op1 = (*valueize) (arg1);
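/* E.g. IFN_UBSAN_CHECK_MUL (x, 0) folds to 0 even when x is unknown,
   and IFN_UBSAN_CHECK_ADD (3, 4) folds to 7 below because
   fold_binary_loc reports no overflow.  */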
4573 if (TREE_CODE (op0) != INTEGER_CST
4574 || TREE_CODE (op1) != INTEGER_CST)
4576 switch (subcode)
4578 case MULT_EXPR:
4579 /* x * 0 = 0 * x = 0 without overflow. */
4580 if (integer_zerop (op0) || integer_zerop (op1))
4581 return build_zero_cst (TREE_TYPE (arg0));
4582 break;
4583 case MINUS_EXPR:
4584 /* y - y = 0 without overflow. */
4585 if (operand_equal_p (op0, op1, 0))
4586 return build_zero_cst (TREE_TYPE (arg0));
4587 break;
4588 default:
4589 break;
4592 tree res
4593 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
4594 if (res
4595 && TREE_CODE (res) == INTEGER_CST
4596 && !TREE_OVERFLOW (res))
4597 return res;
4598 return NULL_TREE;
4601 fn = (*valueize) (gimple_call_fn (stmt));
4602 if (TREE_CODE (fn) == ADDR_EXPR
4603 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
4604 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
4605 && gimple_builtin_call_types_compatible_p (stmt,
4606 TREE_OPERAND (fn, 0)))
4608 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
4609 tree call, retval;
4610 unsigned i;
4611 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4612 args[i] = (*valueize) (gimple_call_arg (stmt, i));
4613 call = build_call_array_loc (loc,
4614 gimple_call_return_type (stmt),
4615 fn, gimple_call_num_args (stmt), args);
4616 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
4617 if (retval)
4619 /* fold_call_expr wraps the result inside a NOP_EXPR. */
4620 STRIP_NOPS (retval);
4621 retval = fold_convert (gimple_call_return_type (stmt), retval);
4623 return retval;
4625 return NULL_TREE;
4628 default:
4629 return NULL_TREE;
4633 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4634 Returns NULL_TREE if folding to a constant is not possible, otherwise
4635 returns a constant according to is_gimple_min_invariant. */
4637 tree
4638 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
4640 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
4641 if (res && is_gimple_min_invariant (res))
4642 return res;
4643 return NULL_TREE;
4647 /* The following set of functions is supposed to fold references using
4648 their constant initializers. */
4650 static tree fold_ctor_reference (tree type, tree ctor,
4651 unsigned HOST_WIDE_INT offset,
4652 unsigned HOST_WIDE_INT size, tree);
4654 /* See if we can find a constructor defining the value of BASE.
4655 When we know the constructor at a constant offset (such as when
4656 BASE is array[40] and we do know the constructor of the array),
4657 BIT_OFFSET is adjusted accordingly.
4659 As a special case, return error_mark_node when constructor
4660 is not explicitly available, but it is known to be zero
4661 such as 'static const int a;'. */
4662 static tree
4663 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
4664 tree (*valueize)(tree))
4666 HOST_WIDE_INT bit_offset2, size, max_size;
4667 if (TREE_CODE (base) == MEM_REF)
4669 if (!integer_zerop (TREE_OPERAND (base, 1)))
4671 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
4672 return NULL_TREE;
4673 *bit_offset += (mem_ref_offset (base).to_short_addr ()
4674 * BITS_PER_UNIT);
4677 if (valueize
4678 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
4679 base = valueize (TREE_OPERAND (base, 0));
4680 if (!base || TREE_CODE (base) != ADDR_EXPR)
4681 return NULL_TREE;
4682 base = TREE_OPERAND (base, 0);
4685 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
4686 DECL_INITIAL. If BASE is a nested reference into another
4687 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
4688 the inner reference. */
4689 switch (TREE_CODE (base))
4691 case VAR_DECL:
4692 case CONST_DECL:
4694 tree init = ctor_for_folding (base);
4696 /* Our semantics are the exact opposite of ctor_for_folding's:
4697 NULL means unknown, while error_mark_node means 0. */
4698 if (init == error_mark_node)
4699 return NULL_TREE;
4700 if (!init)
4701 return error_mark_node;
4702 return init;
4705 case ARRAY_REF:
4706 case COMPONENT_REF:
4707 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
4708 if (max_size == -1 || size != max_size)
4709 return NULL_TREE;
4710 *bit_offset += bit_offset2;
4711 return get_base_constructor (base, bit_offset, valueize);
4713 case STRING_CST:
4714 case CONSTRUCTOR:
4715 return base;
4717 default:
4718 return NULL_TREE;
4722 /* CTOR is a CONSTRUCTOR of an array type. Fold a reference of type TYPE and size
4723 SIZE to the memory at bit OFFSET. */
4725 static tree
4726 fold_array_ctor_reference (tree type, tree ctor,
4727 unsigned HOST_WIDE_INT offset,
4728 unsigned HOST_WIDE_INT size,
4729 tree from_decl)
4731 unsigned HOST_WIDE_INT cnt;
4732 tree cfield, cval;
4733 offset_int low_bound;
4734 offset_int elt_size;
4735 offset_int index, max_index;
4736 offset_int access_index;
4737 tree domain_type = NULL_TREE, index_type = NULL_TREE;
4738 HOST_WIDE_INT inner_offset;
4740 /* Compute low bound and elt size. */
4741 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
4742 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
4743 if (domain_type && TYPE_MIN_VALUE (domain_type))
4745 /* Static constructors for variably sized objects make no sense. */
4746 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
4747 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
4748 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
4750 else
4751 low_bound = 0;
4752 /* Static constructors for variably sized objects make no sense. */
4753 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
4754 == INTEGER_CST);
4755 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
4757 /* We can handle only constant-size accesses that are known not to
4758 be larger than the size of the array element. */
4759 if (!TYPE_SIZE_UNIT (type)
4760 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
4761 || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
4762 || elt_size == 0)
4763 return NULL_TREE;
4765 /* Compute the array index we look for. */
4766 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
4767 elt_size);
4768 access_index += low_bound;
4769 if (index_type)
4770 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
4771 TYPE_SIGN (index_type));
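/* E.g. a read at bit offset 96 from an int array (elt_size 4,
   low_bound 0) yields access_index 3.  */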
4773 /* And offset within the access. */
4774 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
4776 /* See if the array field is large enough to span the whole access. We do not
4777 care to fold accesses spanning multiple array indexes. */
4778 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
4779 return NULL_TREE;
4781 index = low_bound - 1;
4782 if (index_type)
4783 index = wi::ext (index, TYPE_PRECISION (index_type),
4784 TYPE_SIGN (index_type));
4786 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
4788 /* An array constructor might explicitly set the index, specify a range,
4789 or leave the index NULL, meaning that it is the next index after the
4790 previous one.
4791 if (cfield)
4793 if (TREE_CODE (cfield) == INTEGER_CST)
4794 max_index = index = wi::to_offset (cfield);
4795 else
4797 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
4798 index = wi::to_offset (TREE_OPERAND (cfield, 0));
4799 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
4802 else
4804 index += 1;
4805 if (index_type)
4806 index = wi::ext (index, TYPE_PRECISION (index_type),
4807 TYPE_SIGN (index_type));
4808 max_index = index;
4811 /* Do we have a match? */
4812 if (wi::cmpu (access_index, index) >= 0
4813 && wi::cmpu (access_index, max_index) <= 0)
4814 return fold_ctor_reference (type, cval, inner_offset, size,
4815 from_decl);
4817 /* When memory is not explicitly mentioned in the constructor,
4818 it is 0 (or out of range). */
4819 return build_zero_cst (type);
4822 /* CTOR is a CONSTRUCTOR of an aggregate or vector.
4823 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
4825 static tree
4826 fold_nonarray_ctor_reference (tree type, tree ctor,
4827 unsigned HOST_WIDE_INT offset,
4828 unsigned HOST_WIDE_INT size,
4829 tree from_decl)
4831 unsigned HOST_WIDE_INT cnt;
4832 tree cfield, cval;
4834 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
4835 cval)
4837 tree byte_offset = DECL_FIELD_OFFSET (cfield);
4838 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
4839 tree field_size = DECL_SIZE (cfield);
4840 offset_int bitoffset;
4841 offset_int bitoffset_end, access_end;
4843 /* Variably sized objects in static constructors make no sense,
4844 but field_size can be NULL for flexible array members. */
4845 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
4846 && TREE_CODE (byte_offset) == INTEGER_CST
4847 && (field_size != NULL_TREE
4848 ? TREE_CODE (field_size) == INTEGER_CST
4849 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
4851 /* Compute bit offset of the field. */
4852 bitoffset = (wi::to_offset (field_offset)
4853 + wi::lshift (wi::to_offset (byte_offset),
4854 LOG2_BITS_PER_UNIT));
4855 /* Compute bit offset where the field ends. */
4856 if (field_size != NULL_TREE)
4857 bitoffset_end = bitoffset + wi::to_offset (field_size);
4858 else
4859 bitoffset_end = 0;
4861 access_end = offset_int (offset) + size;
4863 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
4864 [BITOFFSET, BITOFFSET_END)? */
4865 if (wi::cmps (access_end, bitoffset) > 0
4866 && (field_size == NULL_TREE
4867 || wi::lts_p (offset, bitoffset_end)))
4869 offset_int inner_offset = offset_int (offset) - bitoffset;
4870 /* We do have overlap. Now see if the field is large enough to
4871 cover the access. Give up on accesses spanning multiple
4872 fields. */
4873 if (wi::cmps (access_end, bitoffset_end) > 0)
4874 return NULL_TREE;
4875 if (wi::lts_p (offset, bitoffset))
4876 return NULL_TREE;
4877 return fold_ctor_reference (type, cval,
4878 inner_offset.to_uhwi (), size,
4879 from_decl);
4882 /* When memory is not explicitly mentioned in the constructor, it is 0. */
4883 return build_zero_cst (type);
4886 /* CTOR is a value initializing memory; fold a reference of type TYPE and size SIZE
4887 to the memory at bit OFFSET. */
4889 static tree
4890 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
4891 unsigned HOST_WIDE_INT size, tree from_decl)
4893 tree ret;
4895 /* We found the field with an exact match. */
4896 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
4897 && !offset)
4898 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
4900 /* We are at the end of the walk; see if we can view-convert the
4901 result. */
4902 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
4903 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
4904 && !compare_tree_int (TYPE_SIZE (type), size)
4905 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
4907 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
4908 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
4909 if (ret)
4910 STRIP_NOPS (ret);
4911 return ret;
4913 /* For constants and byte-aligned/sized reads try to go through
4914 native_encode/interpret. */
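/* E.g. a 4-byte integer read from the byte image of a REAL_CST can be
   folded this way.  */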
4915 if (CONSTANT_CLASS_P (ctor)
4916 && BITS_PER_UNIT == 8
4917 && offset % BITS_PER_UNIT == 0
4918 && size % BITS_PER_UNIT == 0
4919 && size <= MAX_BITSIZE_MODE_ANY_MODE)
4921 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
4922 if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
4923 offset / BITS_PER_UNIT) > 0)
4924 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
4926 if (TREE_CODE (ctor) == CONSTRUCTOR)
4929 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
4930 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
4931 return fold_array_ctor_reference (type, ctor, offset, size,
4932 from_decl);
4933 else
4934 return fold_nonarray_ctor_reference (type, ctor, offset, size,
4935 from_decl);
4938 return NULL_TREE;
4941 /* Return the tree representing the element referenced by T if T is an
4942 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
4943 names using VALUEIZE. Return NULL_TREE otherwise. */
4945 tree
4946 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
4948 tree ctor, idx, base;
4949 HOST_WIDE_INT offset, size, max_size;
4950 tree tem;
4952 if (TREE_THIS_VOLATILE (t))
4953 return NULL_TREE;
4955 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
4956 return get_symbol_constant_value (t);
4958 tem = fold_read_from_constant_string (t);
4959 if (tem)
4960 return tem;
4962 switch (TREE_CODE (t))
4964 case ARRAY_REF:
4965 case ARRAY_RANGE_REF:
4966 /* Constant indexes are handled well by get_base_constructor.
4967 Only special-case variable offsets.
4968 FIXME: This code can't handle nested references with variable indexes
4969 (they will be handled only by iteration of ccp). Perhaps we can bring
4970 get_ref_base_and_extent here and make it use a valueize callback. */
4971 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
4972 && valueize
4973 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
4974 && TREE_CODE (idx) == INTEGER_CST)
4976 tree low_bound, unit_size;
4978 /* If the resulting bit-offset is constant, track it. */
4979 if ((low_bound = array_ref_low_bound (t),
4980 TREE_CODE (low_bound) == INTEGER_CST)
4981 && (unit_size = array_ref_element_size (t),
4982 tree_fits_uhwi_p (unit_size)))
4984 offset_int woffset
4985 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
4986 TYPE_PRECISION (TREE_TYPE (idx)));
4988 if (wi::fits_shwi_p (woffset))
4990 offset = woffset.to_shwi ();
4991 /* TODO: This code seems wrong; we should multiply first and then
4992 check whether the product fits. */
4993 offset *= tree_to_uhwi (unit_size);
4994 offset *= BITS_PER_UNIT;
4996 base = TREE_OPERAND (t, 0);
4997 ctor = get_base_constructor (base, &offset, valueize);
4998 /* Empty constructor. Always fold to 0. */
4999 if (ctor == error_mark_node)
5000 return build_zero_cst (TREE_TYPE (t));
5001 /* Out-of-bounds array access. The value is undefined,
5002 but don't fold. */
5003 if (offset < 0)
5004 return NULL_TREE;
5005 /* We cannot determine the ctor. */
5006 if (!ctor)
5007 return NULL_TREE;
5008 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5009 tree_to_uhwi (unit_size)
5010 * BITS_PER_UNIT,
5011 base);
5015 /* Fallthru. */
5017 case COMPONENT_REF:
5018 case BIT_FIELD_REF:
5019 case TARGET_MEM_REF:
5020 case MEM_REF:
5021 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
5022 ctor = get_base_constructor (base, &offset, valueize);
5024 /* Empty constructor. Always fold to 0. */
5025 if (ctor == error_mark_node)
5026 return build_zero_cst (TREE_TYPE (t));
5027 /* We do not know the precise address. */
5028 if (max_size == -1 || max_size != size)
5029 return NULL_TREE;
5030 /* We cannot determine the ctor. */
5031 if (!ctor)
5032 return NULL_TREE;
5034 /* Out-of-bounds array access. The value is undefined, but don't fold. */
5035 if (offset < 0)
5036 return NULL_TREE;
5038 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5039 base);
5041 case REALPART_EXPR:
5042 case IMAGPART_EXPR:
5044 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5045 if (c && TREE_CODE (c) == COMPLEX_CST)
5046 return fold_build1_loc (EXPR_LOCATION (t),
5047 TREE_CODE (t), TREE_TYPE (t), c);
5048 break;
5051 default:
5052 break;
5055 return NULL_TREE;
5058 tree
5059 fold_const_aggregate_ref (tree t)
5061 return fold_const_aggregate_ref_1 (t, NULL);
5064 /* Look up the virtual method with index TOKEN in the virtual table V
5065 at OFFSET.
5066 Set CAN_REFER if non-NULL to false if the method
5067 is not referable or if the virtual table is ill-formed (such as rewritten
5068 by a non-C++-produced symbol). Otherwise just return NULL in that case. */
5070 tree
5071 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5072 tree v,
5073 unsigned HOST_WIDE_INT offset,
5074 bool *can_refer)
5076 tree vtable = v, init, fn;
5077 unsigned HOST_WIDE_INT size;
5078 unsigned HOST_WIDE_INT elt_size, access_index;
5079 tree domain_type;
5081 if (can_refer)
5082 *can_refer = true;
5084 /* First of all, double-check that we have a virtual table. */
5085 if (TREE_CODE (v) != VAR_DECL
5086 || !DECL_VIRTUAL_P (v))
5088 gcc_assert (in_lto_p);
5089 /* Pass down that we lost track of the target. */
5090 if (can_refer)
5091 *can_refer = false;
5092 return NULL_TREE;
5095 init = ctor_for_folding (v);
5097 /* The virtual tables should always be born with constructors
5098 and we should always assume that they are available for
5099 folding. At the moment we do not stream them in all cases,
5100 but it should never happen that the ctor seems unreachable. */
5101 gcc_assert (init);
5102 if (init == error_mark_node)
5104 gcc_assert (in_lto_p);
5105 /* Pass down that we lost track of the target. */
5106 if (can_refer)
5107 *can_refer = false;
5108 return NULL_TREE;
5110 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
5111 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
5112 offset *= BITS_PER_UNIT;
5113 offset += token * size;
5115 /* Look up the value in the constructor, which is assumed to be an array.
5116 This is equivalent to
5117 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5118 offset, size, NULL);
5119 but in constant time. We expect that the frontend produced a simple
5120 array without indexed initializers. */
5122 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5123 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5124 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5125 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5127 access_index = offset / BITS_PER_UNIT / elt_size;
5128 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
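/* E.g. with 64-bit function pointers (size 64, elt_size 8), OFFSET 16
   and TOKEN 2 give offset = 16*8 + 2*64 = 256 bits, so access_index
   is 256 / 8 / 8 = 4.  */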
5130 /* This code assumes that there are no indexed
5131 fields produced by the C++ FE, so we can directly index the array. */
5132 if (access_index < CONSTRUCTOR_NELTS (init))
5134 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5135 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5136 STRIP_NOPS (fn);
5138 else
5139 fn = NULL;
5141 /* For a type-inconsistent program we may end up looking up a virtual method
5142 in a virtual table that does not contain TOKEN entries. We may overrun
5143 the virtual table and pick up a constant or an RTTI info pointer.
5144 In any case the call is undefined. */
5145 if (!fn
5146 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5147 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5148 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5149 else
5151 fn = TREE_OPERAND (fn, 0);
5153 /* When the cgraph node is missing and the function is not public, we cannot
5154 devirtualize. This can happen in WHOPR when the actual method
5155 ends up in another partition, because we found the devirtualization
5156 possibility too late. */
5157 if (!can_refer_decl_in_current_unit_p (fn, vtable))
5159 if (can_refer)
5161 *can_refer = false;
5162 return fn;
5164 return NULL_TREE;
5168 /* Make sure we create a cgraph node for functions we'll reference.
5169 They can be non-existent if the reference comes from an entry
5170 of an external vtable for example. */
5171 cgraph_node::get_create (fn);
5173 return fn;
5176 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5177 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5178 KNOWN_BINFO carries the binfo describing the true type of
5179 OBJ_TYPE_REF_OBJECT(REF).
5180 Set CAN_REFER if non-NULL to false if the method
5181 is not referable or if the virtual table is ill-formed (such as rewritten
5182 by a non-C++-produced symbol). Otherwise just return NULL in that case. */
5184 tree
5185 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5186 bool *can_refer)
5188 unsigned HOST_WIDE_INT offset;
5189 tree v;
5191 v = BINFO_VTABLE (known_binfo);
5192 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5193 if (!v)
5194 return NULL_TREE;
5196 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5198 if (can_refer)
5199 *can_refer = false;
5200 return NULL_TREE;
5202 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
5205 /* Return true iff VAL is a gimple expression that is known to be
5206 non-negative. Restricted to floating-point inputs. */
5208 bool
5209 gimple_val_nonnegative_real_p (tree val)
5211 gimple def_stmt;
5213 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
5215 /* Use existing logic for non-gimple trees. */
5216 if (tree_expr_nonnegative_p (val))
5217 return true;
5219 if (TREE_CODE (val) != SSA_NAME)
5220 return false;
5222 /* Currently we look only at the immediately defining statement
5223 to make this determination, since recursion on defining
5224 statements of operands can lead to quadratic behavior in the
5225 worst case. This is expected to catch almost all occurrences
5226 in practice. It would be possible to implement limited-depth
5227 recursion if important cases are lost. Alternatively, passes
5228 that need this information (such as the pow/powi lowering code
5229 in the cse_sincos pass) could be revised to provide it through
5230 dataflow propagation. */
5232 def_stmt = SSA_NAME_DEF_STMT (val);
5234 if (is_gimple_assign (def_stmt))
5236 tree op0, op1;
5238 /* See fold-const.c:tree_expr_nonnegative_p for additional
5239 cases that could be handled with recursion. */
5241 switch (gimple_assign_rhs_code (def_stmt))
5243 case ABS_EXPR:
5244 /* Always true for floating-point operands. */
5245 return true;
5247 case MULT_EXPR:
5248 /* True if the two operands are identical (since we are
5249 restricted to floating-point inputs). */
5250 op0 = gimple_assign_rhs1 (def_stmt);
5251 op1 = gimple_assign_rhs2 (def_stmt);
5253 if (op0 == op1
5254 || operand_equal_p (op0, op1, 0))
5255 return true;
5257 default:
5258 return false;
5261 else if (is_gimple_call (def_stmt))
5263 tree fndecl = gimple_call_fndecl (def_stmt);
5264 if (fndecl
5265 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
5267 tree arg1;
5269 switch (DECL_FUNCTION_CODE (fndecl))
5271 CASE_FLT_FN (BUILT_IN_ACOS):
5272 CASE_FLT_FN (BUILT_IN_ACOSH):
5273 CASE_FLT_FN (BUILT_IN_CABS):
5274 CASE_FLT_FN (BUILT_IN_COSH):
5275 CASE_FLT_FN (BUILT_IN_ERFC):
5276 CASE_FLT_FN (BUILT_IN_EXP):
5277 CASE_FLT_FN (BUILT_IN_EXP10):
5278 CASE_FLT_FN (BUILT_IN_EXP2):
5279 CASE_FLT_FN (BUILT_IN_FABS):
5280 CASE_FLT_FN (BUILT_IN_FDIM):
5281 CASE_FLT_FN (BUILT_IN_HYPOT):
5282 CASE_FLT_FN (BUILT_IN_POW10):
5283 return true;
5285 CASE_FLT_FN (BUILT_IN_SQRT):
5286 /* sqrt(-0.0) is -0.0, and sqrt is not defined over
5287 negative inputs. */
5288 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
5289 return true;
5291 break;
5293 CASE_FLT_FN (BUILT_IN_POWI):
5294 /* True if the second argument is an even integer. */
5295 arg1 = gimple_call_arg (def_stmt, 1);
5297 if (TREE_CODE (arg1) == INTEGER_CST
5298 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
5299 return true;
5301 break;
5303 CASE_FLT_FN (BUILT_IN_POW):
5304 /* True if the second argument is an even integer-valued
5305 real. */
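/* E.g. pow (x, 2.0): n == 2 is even and converts back to 2.0
   exactly, so the result is known to be nonnegative.  */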
5306 arg1 = gimple_call_arg (def_stmt, 1);
5308 if (TREE_CODE (arg1) == REAL_CST)
5310 REAL_VALUE_TYPE c;
5311 HOST_WIDE_INT n;
5313 c = TREE_REAL_CST (arg1);
5314 n = real_to_integer (&c);
5316 if ((n & 1) == 0)
5318 REAL_VALUE_TYPE cint;
5319 real_from_integer (&cint, VOIDmode, n, SIGNED);
5320 if (real_identical (&c, &cint))
5321 return true;
5325 break;
5327 default:
5328 return false;
5333 return false;
5336 /* Given a pointer value T, return a simplified version of an
5337 indirection through T, or NULL_TREE if no simplification is
5338 possible. Note that the resulting type may be different from
5339 the type pointed to in the sense that it is still compatible
5340 from the langhooks point of view. */
5342 tree
5343 gimple_fold_indirect_ref (tree t)
5345 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
5346 tree sub = t;
5347 tree subtype;
5349 STRIP_NOPS (sub);
5350 subtype = TREE_TYPE (sub);
5351 if (!POINTER_TYPE_P (subtype))
5352 return NULL_TREE;
5354 if (TREE_CODE (sub) == ADDR_EXPR)
5356 tree op = TREE_OPERAND (sub, 0);
5357 tree optype = TREE_TYPE (op);
5358 /* *&p => p */
5359 if (useless_type_conversion_p (type, optype))
5360 return op;
5362 /* *(foo *)&fooarray => fooarray[0] */
5363 if (TREE_CODE (optype) == ARRAY_TYPE
5364 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
5365 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5367 tree type_domain = TYPE_DOMAIN (optype);
5368 tree min_val = size_zero_node;
5369 if (type_domain && TYPE_MIN_VALUE (type_domain))
5370 min_val = TYPE_MIN_VALUE (type_domain);
5371 if (TREE_CODE (min_val) == INTEGER_CST)
5372 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
5374 /* *(foo *)&complexfoo => __real__ complexfoo */
5375 else if (TREE_CODE (optype) == COMPLEX_TYPE
5376 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5377 return fold_build1 (REALPART_EXPR, type, op);
5378 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
5379 else if (TREE_CODE (optype) == VECTOR_TYPE
5380 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5382 tree part_width = TYPE_SIZE (type);
5383 tree index = bitsize_int (0);
5384 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
5388 /* *(p + CST) -> ... */
5389 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
5390 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
5392 tree addr = TREE_OPERAND (sub, 0);
5393 tree off = TREE_OPERAND (sub, 1);
5394 tree addrtype;
5396 STRIP_NOPS (addr);
5397 addrtype = TREE_TYPE (addr);
5399 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
5400 if (TREE_CODE (addr) == ADDR_EXPR
5401 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
5402 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
5403 && tree_fits_uhwi_p (off))
5405 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
5406 tree part_width = TYPE_SIZE (type);
5407 unsigned HOST_WIDE_INT part_widthi
5408 = tree_to_shwi (part_width) / BITS_PER_UNIT;
5409 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
5410 tree index = bitsize_int (indexi);
5411 if (offset / part_widthi
5412 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
5413 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
5414 part_width, index);
5417 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5418 if (TREE_CODE (addr) == ADDR_EXPR
5419 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
5420 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
5422 tree size = TYPE_SIZE_UNIT (type);
5423 if (tree_int_cst_equal (size, off))
5424 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
5427 /* *(p + CST) -> MEM_REF <p, CST>. */
5428 if (TREE_CODE (addr) != ADDR_EXPR
5429 || DECL_P (TREE_OPERAND (addr, 0)))
5430 return fold_build2 (MEM_REF, type,
5431 addr,
5432 wide_int_to_tree (ptype, off));
5435 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
5436 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
5437 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
5438 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
5440 tree type_domain;
5441 tree min_val = size_zero_node;
5442 tree osub = sub;
5443 sub = gimple_fold_indirect_ref (sub);
5444 if (! sub)
5445 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
5446 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
5447 if (type_domain && TYPE_MIN_VALUE (type_domain))
5448 min_val = TYPE_MIN_VALUE (type_domain);
5449 if (TREE_CODE (min_val) == INTEGER_CST)
5450 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
5453 return NULL_TREE;
5456 /* Return true if CODE is an operation that when operating on signed
5457 integer types involves undefined behavior on overflow and the
5458 operation can be expressed with unsigned arithmetic. */
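/* For instance, signed PLUS_EXPR qualifies because it maps to wrapping
   unsigned addition, while TRUNC_DIV_EXPR does not: its only overflow
   case, INT_MIN / -1, has no unsigned counterpart.  */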
5460 bool
5461 arith_code_with_undefined_signed_overflow (tree_code code)
5463 switch (code)
5465 case PLUS_EXPR:
5466 case MINUS_EXPR:
5467 case MULT_EXPR:
5468 case NEGATE_EXPR:
5469 case POINTER_PLUS_EXPR:
5470 return true;
5471 default:
5472 return false;
5476 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
5477 operation that can be transformed to unsigned arithmetic by converting
5478 its operands, carrying out the operation in the corresponding unsigned
5479 type and converting the result back to the original type.
5481 Returns a sequence of statements that replace STMT and also contain
5482 a modified form of STMT itself. */
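/* For example, with 'int' operands the statement a_1 = b_2 + c_3 is
   rewritten to
     _4 = (unsigned int) b_2;
     _5 = (unsigned int) c_3;
     _6 = _4 + _5;
     a_1 = (int) _6;
   where the unsigned addition wraps instead of invoking undefined
   behavior.  */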
5484 gimple_seq
5485 rewrite_to_defined_overflow (gimple stmt)
5487 if (dump_file && (dump_flags & TDF_DETAILS))
5489 fprintf (dump_file, "rewriting stmt with undefined signed "
5490 "overflow ");
5491 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5494 tree lhs = gimple_assign_lhs (stmt);
5495 tree type = unsigned_type_for (TREE_TYPE (lhs));
5496 gimple_seq stmts = NULL;
5497 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
5499 gimple_seq stmts2 = NULL;
5500 gimple_set_op (stmt, i,
5501 force_gimple_operand (fold_convert (type,
5502 gimple_op (stmt, i)),
5503 &stmts2, true, NULL_TREE));
5504 gimple_seq_add_seq (&stmts, stmts2);
5506 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
5507 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
5508 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
5509 gimple_seq_add_stmt (&stmts, stmt);
5510 gimple cvt = gimple_build_assign_with_ops
5511 (NOP_EXPR, lhs, gimple_assign_lhs (stmt), NULL_TREE);
5512 gimple_seq_add_stmt (&stmts, cvt);
5514 return stmts;
5518 /* Build the expression CODE OP0 of type TYPE with location LOC,
5519 simplifying it first if possible using VALUEIZE if not NULL.
5520 OP0 is expected to be valueized already. Returns the built
5521 expression value and appends statements possibly defining it
5522 to SEQ. */
5524 tree
5525 gimple_build (gimple_seq *seq, location_t loc,
5526 enum tree_code code, tree type, tree op0,
5527 tree (*valueize)(tree))
5529 tree res = gimple_simplify (code, type, op0, seq, valueize);
5530 if (!res)
5532 if (gimple_in_ssa_p (cfun))
5533 res = make_ssa_name (type, NULL);
5534 else
5535 res = create_tmp_reg (type, NULL);
5536 gimple stmt;
5537 if (code == REALPART_EXPR
5538 || code == IMAGPART_EXPR
5539 || code == VIEW_CONVERT_EXPR)
5540 stmt = gimple_build_assign_with_ops (code, res,
5541 build1 (code, type,
5542 op0), NULL_TREE);
5543 else
5544 stmt = gimple_build_assign_with_ops (code, res, op0, NULL_TREE);
5545 gimple_set_location (stmt, loc);
5546 gimple_seq_add_stmt_without_update (seq, stmt);
5548 return res;
5551 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
5552 simplifying it first if possible using VALUEIZE if not NULL.
5553 OP0 and OP1 are expected to be valueized already. Returns the built
5554 expression value and appends statements possibly defining it
5555 to SEQ. */
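/* E.g. gimple_build (&seq, loc, PLUS_EXPR, type, a, b, NULL) either
   returns an already simplified value or a new SSA name defined by a
   statement appended to SEQ.  */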
5557 tree
5558 gimple_build (gimple_seq *seq, location_t loc,
5559 enum tree_code code, tree type, tree op0, tree op1,
5560 tree (*valueize)(tree))
5562 tree res = gimple_simplify (code, type, op0, op1, seq, valueize);
5563 if (!res)
5565 if (gimple_in_ssa_p (cfun))
5566 res = make_ssa_name (type, NULL);
5567 else
5568 res = create_tmp_reg (type, NULL);
5569 gimple stmt = gimple_build_assign_with_ops (code, res, op0, op1);
5570 gimple_set_location (stmt, loc);
5571 gimple_seq_add_stmt_without_update (seq, stmt);
5573 return res;
5576 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
5577 simplifying it first if possible using VALUEIZE if not NULL.
5578 OP0, OP1 and OP2 are expected to be valueized already. Returns the built
5579 expression value and appends statements possibly defining it
5580 to SEQ. */
5582 tree
5583 gimple_build (gimple_seq *seq, location_t loc,
5584 enum tree_code code, tree type, tree op0, tree op1, tree op2,
5585 tree (*valueize)(tree))
5587 tree res = gimple_simplify (code, type, op0, op1, op2,
5588 seq, valueize);
5589 if (!res)
5591 if (gimple_in_ssa_p (cfun))
5592 res = make_ssa_name (type, NULL);
5593 else
5594 res = create_tmp_reg (type, NULL);
5595 gimple stmt;
5596 if (code == BIT_FIELD_REF)
5597 stmt = gimple_build_assign_with_ops (code, res,
5598 build3 (BIT_FIELD_REF, type,
5599 op0, op1, op2),
5600 NULL_TREE);
5601 else
5602 stmt = gimple_build_assign_with_ops (code, res, op0, op1, op2);
5603 gimple_set_location (stmt, loc);
5604 gimple_seq_add_stmt_without_update (seq, stmt);
5606 return res;
5609 /* Build the call FN (ARG0) with a result of type TYPE
5610 (or no result if TYPE is void) with location LOC,
5611 simplifying it first if possible using VALUEIZE if not NULL.
5612 ARG0 is expected to be valueized already. Returns the built
5613 expression value (or NULL_TREE if TYPE is void) and appends
5614 statements possibly defining it to SEQ. */
5616 tree
5617 gimple_build (gimple_seq *seq, location_t loc,
5618 enum built_in_function fn, tree type, tree arg0,
5619 tree (*valueize)(tree))
5621 tree res = gimple_simplify (fn, type, arg0, seq, valueize);
5622 if (!res)
5624 tree decl = builtin_decl_implicit (fn);
5625 gimple stmt = gimple_build_call (decl, 1, arg0);
5626 if (!VOID_TYPE_P (type))
5628 if (gimple_in_ssa_p (cfun))
5629 res = make_ssa_name (type, NULL);
5630 else
5631 res = create_tmp_reg (type, NULL);
5632 gimple_call_set_lhs (stmt, res);
5634 gimple_set_location (stmt, loc);
5635 gimple_seq_add_stmt_without_update (seq, stmt);
5637 return res;
5640 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
5641 (or no result if TYPE is void) with location LOC,
5642 simplifying it first if possible using VALUEIZE if not NULL.
5643 ARG0 and ARG1 are expected to be valueized already. Returns the built
5644 expression value (or NULL_TREE if TYPE is void) and appends
5645 statements possibly defining it to SEQ. */
5647 tree
5648 gimple_build (gimple_seq *seq, location_t loc,
5649 enum built_in_function fn, tree type, tree arg0, tree arg1,
5650 tree (*valueize)(tree))
5652 tree res = gimple_simplify (fn, type, arg0, arg1, seq, valueize);
5653 if (!res)
5655 tree decl = builtin_decl_implicit (fn);
5656 gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
5657 if (!VOID_TYPE_P (type))
5659 if (gimple_in_ssa_p (cfun))
5660 res = make_ssa_name (type, NULL);
5661 else
5662 res = create_tmp_reg (type, NULL);
5663 gimple_call_set_lhs (stmt, res);
5665 gimple_set_location (stmt, loc);
5666 gimple_seq_add_stmt_without_update (seq, stmt);
5668 return res;
5671 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
5672 (or no result if TYPE is void) with location LOC,
5673 simplifying it first if possible using VALUEIZE if not NULL.
5674 ARG0, ARG1 and ARG2 are expected to be valueized already. Returns the built
5675 expression value (or NULL_TREE if TYPE is void) and appends
5676 statements possibly defining it to SEQ. */
5678 tree
5679 gimple_build (gimple_seq *seq, location_t loc,
5680 enum built_in_function fn, tree type,
5681 tree arg0, tree arg1, tree arg2,
5682 tree (*valueize)(tree))
5684 tree res = gimple_simplify (fn, type, arg0, arg1, arg2, seq, valueize);
5685 if (!res)
5687 tree decl = builtin_decl_implicit (fn);
5688 gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
5689 if (!VOID_TYPE_P (type))
5691 if (gimple_in_ssa_p (cfun))
5692 res = make_ssa_name (type, NULL);
5693 else
5694 res = create_tmp_reg (type, NULL);
5695 gimple_call_set_lhs (stmt, res);
5697 gimple_set_location (stmt, loc);
5698 gimple_seq_add_stmt_without_update (seq, stmt);
5700 return res;
5703 /* Build the conversion (TYPE) OP with a result of type TYPE
5704 with location LOC if such conversion is necessary in GIMPLE,
5705 simplifying it first.
5706 Returns the built expression value and appends
5707 statements possibly defining it to SEQ. */
5709 tree
5710 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
5712 if (useless_type_conversion_p (type, TREE_TYPE (op)))
5713 return op;
5714 return gimple_build (seq, loc, NOP_EXPR, type, op);