gcc/gimple-fold.c
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2015 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "hash-set.h"
26 #include "machmode.h"
27 #include "vec.h"
28 #include "double-int.h"
29 #include "input.h"
30 #include "alias.h"
31 #include "symtab.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "stringpool.h"
37 #include "hashtab.h"
38 #include "hard-reg-set.h"
39 #include "function.h"
40 #include "rtl.h"
41 #include "flags.h"
42 #include "statistics.h"
43 #include "real.h"
44 #include "fixed-value.h"
45 #include "insn-config.h"
46 #include "expmed.h"
47 #include "dojump.h"
48 #include "explow.h"
49 #include "calls.h"
50 #include "emit-rtl.h"
51 #include "varasm.h"
52 #include "stmt.h"
53 #include "expr.h"
54 #include "stor-layout.h"
55 #include "dumpfile.h"
56 #include "bitmap.h"
57 #include "predict.h"
58 #include "dominance.h"
59 #include "basic-block.h"
60 #include "tree-ssa-alias.h"
61 #include "internal-fn.h"
62 #include "gimple-fold.h"
63 #include "gimple-expr.h"
64 #include "is-a.h"
65 #include "gimple.h"
66 #include "gimplify.h"
67 #include "gimple-iterator.h"
68 #include "gimple-ssa.h"
69 #include "tree-ssanames.h"
70 #include "tree-into-ssa.h"
71 #include "tree-dfa.h"
72 #include "tree-ssa.h"
73 #include "tree-ssa-propagate.h"
74 #include "target.h"
75 #include "hash-map.h"
76 #include "plugin-api.h"
77 #include "ipa-ref.h"
78 #include "cgraph.h"
79 #include "ipa-utils.h"
80 #include "gimple-pretty-print.h"
81 #include "tree-ssa-address.h"
82 #include "langhooks.h"
83 #include "gimplify-me.h"
84 #include "dbgcnt.h"
85 #include "builtins.h"
86 #include "output.h"
87 #include "tree-eh.h"
88 #include "gimple-match.h"
89 #include "tree-phinodes.h"
90 #include "ssa-iterators.h"
92 /* Return true when DECL can be referenced from the current unit.
93 FROM_DECL (if non-NULL) specifies the variable whose constructor DECL was taken from.
94 We can get declarations that are not possible to reference for various
95 reasons:
97 1) When analyzing C++ virtual tables.
98 C++ virtual tables do have known constructors even
99 when they are keyed to another compilation unit.
100 Those tables can contain pointers to methods and vars
101 in other units. Those methods have both STATIC and EXTERNAL
102 set.
103 2) In WHOPR mode devirtualization might lead to a reference
104 to a method that was partitioned elsewhere.
105 In this case we have static VAR_DECL or FUNCTION_DECL
106 that has no corresponding callgraph/varpool node
107 declaring the body.
108 3) COMDAT functions referred to by external vtables that
109 we devirtualize only during the final compilation stage.
110 At this time we already decided that we will not output
111 the function body and thus we can't reference the symbol
112 directly. */
114 static bool
115 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
117 varpool_node *vnode;
118 struct cgraph_node *node;
119 symtab_node *snode;
121 if (DECL_ABSTRACT_P (decl))
122 return false;
124 /* We are concerned only about static/external vars and functions. */
125 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
126 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
127 return true;
129 /* Static objects can be referred to only if they have not been optimized out yet. */
130 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
132 /* Before we start optimizing unreachable code we can be sure all
133 static objects are defined. */
134 if (symtab->function_flags_ready)
135 return true;
136 snode = symtab_node::get (decl);
137 if (!snode || !snode->definition)
138 return false;
139 node = dyn_cast <cgraph_node *> (snode);
140 return !node || !node->global.inlined_to;
143 /* We will later output the initializer, so we can refer to it.
144 So we are concerned only when DECL comes from initializer of
145 external var or var that has been optimized out. */
146 if (!from_decl
147 || TREE_CODE (from_decl) != VAR_DECL
148 || (!DECL_EXTERNAL (from_decl)
149 && (vnode = varpool_node::get (from_decl)) != NULL
150 && vnode->definition)
151 || (flag_ltrans
152 && (vnode = varpool_node::get (from_decl)) != NULL
153 && vnode->in_other_partition))
154 return true;
155 /* We are folding a reference from an external vtable. The vtable may refer
156 to a symbol keyed to another compilation unit. The other compilation
157 unit may be in a separate DSO and the symbol may be hidden. */
158 if (DECL_VISIBILITY_SPECIFIED (decl)
159 && DECL_EXTERNAL (decl)
160 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
161 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
162 return false;
163 /* When the function is public, we can always introduce a new reference.
164 The exception is COMDAT functions, where introducing a direct
165 reference implies the need to include the function body in the current unit. */
166 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
167 return true;
168 /* We have a COMDAT. We are going to check whether we still have a definition
169 or whether the definition is going to be output in another partition.
170 Bypass this when gimplifying; all needed functions will be produced.
172 As observed in PR20991 for already optimized out comdat virtual functions,
173 it may be tempting not to give up, because the copy will be
174 output elsewhere when the corresponding vtable is output.
175 This is however not possible - the ABI specifies that COMDATs are output
176 in the units where they are used, and when the other unit was compiled
177 with LTO it is possible that the vtable was kept public while the function
178 itself was privatized. */
179 if (!symtab->function_flags_ready)
180 return true;
182 snode = symtab_node::get (decl);
183 if (!snode
184 || ((!snode->definition || DECL_EXTERNAL (decl))
185 && (!snode->in_other_partition
186 || (!snode->forced_by_abi && !snode->force_output))))
187 return false;
188 node = dyn_cast <cgraph_node *> (snode);
189 return !node || !node->global.inlined_to;
192 /* CVAL is a value taken from the DECL_INITIAL of a variable. Try to transform
193 it into an acceptable form for is_gimple_min_invariant.
194 FROM_DECL (if non-NULL) specifies the variable whose constructor contains CVAL. */
196 tree
197 canonicalize_constructor_val (tree cval, tree from_decl)
199 tree orig_cval = cval;
200 STRIP_NOPS (cval);
201 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
202 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
204 tree ptr = TREE_OPERAND (cval, 0);
205 if (is_gimple_min_invariant (ptr))
206 cval = build1_loc (EXPR_LOCATION (cval),
207 ADDR_EXPR, TREE_TYPE (ptr),
208 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
209 ptr,
210 fold_convert (ptr_type_node,
211 TREE_OPERAND (cval, 1))));
213 if (TREE_CODE (cval) == ADDR_EXPR)
215 tree base = NULL_TREE;
216 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
218 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
219 if (base)
220 TREE_OPERAND (cval, 0) = base;
222 else
223 base = get_base_address (TREE_OPERAND (cval, 0));
224 if (!base)
225 return NULL_TREE;
227 if ((TREE_CODE (base) == VAR_DECL
228 || TREE_CODE (base) == FUNCTION_DECL)
229 && !can_refer_decl_in_current_unit_p (base, from_decl))
230 return NULL_TREE;
231 if (TREE_CODE (base) == VAR_DECL)
232 TREE_ADDRESSABLE (base) = 1;
233 else if (TREE_CODE (base) == FUNCTION_DECL)
235 /* Make sure we create a cgraph node for functions we'll reference.
236 They can be non-existent if the reference comes from an entry
237 of an external vtable for example. */
238 cgraph_node::get_create (base);
240 /* Fixup types in global initializers. */
241 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
242 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
244 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
245 cval = fold_convert (TREE_TYPE (orig_cval), cval);
246 return cval;
248 if (TREE_OVERFLOW_P (cval))
249 return drop_tree_overflow (cval);
250 return orig_cval;
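
/* A minimal sketch of the canonicalization above (added illustration,
   declarations assumed, not from the original source): given an
   initializer value such as

     static int a[4];
     ... (char *) &a + 4       i.e.  POINTER_PLUS_EXPR <&a, 4>

   the routine rewrites it to the invariant form

     &MEM[(char *)&a + 4]      i.e.  ADDR_EXPR <MEM_REF <&a, 4>>

   which is_gimple_min_invariant accepts.  */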
253 /* If SYM is a constant variable with known value, return the value.
254 NULL_TREE is returned otherwise. */
256 tree
257 get_symbol_constant_value (tree sym)
259 tree val = ctor_for_folding (sym);
260 if (val != error_mark_node)
262 if (val)
264 val = canonicalize_constructor_val (unshare_expr (val), sym);
265 if (val && is_gimple_min_invariant (val))
266 return val;
267 else
268 return NULL_TREE;
270 /* Variables declared 'const' without an initializer
271 have zero as the initializer if they may not be
272 overridden at link or run time. */
273 if (!val
274 && is_gimple_reg_type (TREE_TYPE (sym)))
275 return build_zero_cst (TREE_TYPE (sym));
278 return NULL_TREE;
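
/* Illustrative examples, assuming these declarations (not from the
   original source):

     static const int answer = 42;   value folds to 42 (DECL_INITIAL)
     static const int blank;         no initializer and not overridable
                                     at link or run time, so reads of it
                                     fold to 0.  */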
283 /* Subroutine of fold_stmt. We perform several simplifications of the
284 memory reference tree EXPR and make sure to re-gimplify them properly
285 after propagation of constant addresses. IS_LHS is true if the
286 reference is supposed to be an lvalue. */
288 static tree
289 maybe_fold_reference (tree expr, bool is_lhs)
291 tree result;
293 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
294 || TREE_CODE (expr) == REALPART_EXPR
295 || TREE_CODE (expr) == IMAGPART_EXPR)
296 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
297 return fold_unary_loc (EXPR_LOCATION (expr),
298 TREE_CODE (expr),
299 TREE_TYPE (expr),
300 TREE_OPERAND (expr, 0));
301 else if (TREE_CODE (expr) == BIT_FIELD_REF
302 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
303 return fold_ternary_loc (EXPR_LOCATION (expr),
304 TREE_CODE (expr),
305 TREE_TYPE (expr),
306 TREE_OPERAND (expr, 0),
307 TREE_OPERAND (expr, 1),
308 TREE_OPERAND (expr, 2));
310 if (!is_lhs
311 && (result = fold_const_aggregate_ref (expr))
312 && is_gimple_min_invariant (result))
313 return result;
315 return NULL_TREE;
319 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
320 replacement rhs for the statement or NULL_TREE if no simplification
321 could be made. It is assumed that the operands have been previously
322 folded. */
324 static tree
325 fold_gimple_assign (gimple_stmt_iterator *si)
327 gimple stmt = gsi_stmt (*si);
328 enum tree_code subcode = gimple_assign_rhs_code (stmt);
329 location_t loc = gimple_location (stmt);
331 tree result = NULL_TREE;
333 switch (get_gimple_rhs_class (subcode))
335 case GIMPLE_SINGLE_RHS:
337 tree rhs = gimple_assign_rhs1 (stmt);
339 if (TREE_CLOBBER_P (rhs))
340 return NULL_TREE;
342 if (REFERENCE_CLASS_P (rhs))
343 return maybe_fold_reference (rhs, false);
345 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
347 tree val = OBJ_TYPE_REF_EXPR (rhs);
348 if (is_gimple_min_invariant (val))
349 return val;
350 else if (flag_devirtualize && virtual_method_call_p (rhs))
352 bool final;
353 vec <cgraph_node *>targets
354 = possible_polymorphic_call_targets (rhs, stmt, &final);
355 if (final && targets.length () <= 1 && dbg_cnt (devirt))
357 if (dump_enabled_p ())
359 location_t loc = gimple_location_safe (stmt);
360 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
361 "resolving virtual function address "
362 "reference to function %s\n",
363 targets.length () == 1
364 ? targets[0]->name ()
365 : "NULL");
367 if (targets.length () == 1)
369 val = fold_convert (TREE_TYPE (val),
370 build_fold_addr_expr_loc
371 (loc, targets[0]->decl));
372 STRIP_USELESS_TYPE_CONVERSION (val);
374 else
375 /* We cannot use __builtin_unreachable here because it
376 cannot have its address taken. */
377 val = build_int_cst (TREE_TYPE (val), 0);
378 return val;
383 else if (TREE_CODE (rhs) == ADDR_EXPR)
385 tree ref = TREE_OPERAND (rhs, 0);
386 tree tem = maybe_fold_reference (ref, true);
387 if (tem
388 && TREE_CODE (tem) == MEM_REF
389 && integer_zerop (TREE_OPERAND (tem, 1)))
390 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
391 else if (tem)
392 result = fold_convert (TREE_TYPE (rhs),
393 build_fold_addr_expr_loc (loc, tem));
394 else if (TREE_CODE (ref) == MEM_REF
395 && integer_zerop (TREE_OPERAND (ref, 1)))
396 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
399 else if (TREE_CODE (rhs) == CONSTRUCTOR
400 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
401 && (CONSTRUCTOR_NELTS (rhs)
402 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
404 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
405 unsigned i;
406 tree val;
408 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
409 if (TREE_CODE (val) != INTEGER_CST
410 && TREE_CODE (val) != REAL_CST
411 && TREE_CODE (val) != FIXED_CST)
412 return NULL_TREE;
414 return build_vector_from_ctor (TREE_TYPE (rhs),
415 CONSTRUCTOR_ELTS (rhs));
418 else if (DECL_P (rhs))
419 return get_symbol_constant_value (rhs);
421 /* If we couldn't fold the RHS, hand over to the generic
422 fold routines. */
423 if (result == NULL_TREE)
424 result = fold (rhs);
426 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
427 that may have been added by fold, and "useless" type
428 conversions that might now be apparent due to propagation. */
429 STRIP_USELESS_TYPE_CONVERSION (result);
431 if (result != rhs && valid_gimple_rhs_p (result))
432 return result;
434 return NULL_TREE;
436 break;
438 case GIMPLE_UNARY_RHS:
439 break;
441 case GIMPLE_BINARY_RHS:
442 /* Try to canonicalize for boolean-typed X the comparisons
443 X == 0, X == 1, X != 0, and X != 1. */
444 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
445 || gimple_assign_rhs_code (stmt) == NE_EXPR)
447 tree lhs = gimple_assign_lhs (stmt);
448 tree op1 = gimple_assign_rhs1 (stmt);
449 tree op2 = gimple_assign_rhs2 (stmt);
450 tree type = TREE_TYPE (op1);
452 /* Check whether the comparison operands are of the same boolean
453 type as the result type.
454 Check that the second operand is an integer constant with value
455 one or zero. */
456 if (TREE_CODE (op2) == INTEGER_CST
457 && (integer_zerop (op2) || integer_onep (op2))
458 && useless_type_conversion_p (TREE_TYPE (lhs), type))
460 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
461 bool is_logical_not = false;
463 /* X == 0 and X != 1 is a logical-not of X;
464 X == 1 and X != 0 is X. */
465 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
466 || (cmp_code == NE_EXPR && integer_onep (op2)))
467 is_logical_not = true;
469 if (is_logical_not == false)
470 result = op1;
471 /* The transformation !X -> ~X is valid only for X
472 of one-bit precision type. */
473 else if (TYPE_PRECISION (type) == 1)
474 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
475 type, op1);
476 /* Otherwise we use !X -> X ^ 1. */
477 else
478 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
479 type, op1, build_int_cst (type, 1));
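
/* A minimal sketch of the canonicalization above (added illustration):
   for a one-bit boolean X and a wider boolean-typed B,

     tmp = (X == 1);   -->   tmp = X;
     tmp = (X != 0);   -->   tmp = X;
     tmp = (X == 0);   -->   tmp = ~X;       (one-bit precision)
     tmp = (B != 1);   -->   tmp = B ^ 1;    (precision > 1)  */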
484 if (!result)
485 result = fold_binary_loc (loc, subcode,
486 TREE_TYPE (gimple_assign_lhs (stmt)),
487 gimple_assign_rhs1 (stmt),
488 gimple_assign_rhs2 (stmt));
490 if (result)
492 STRIP_USELESS_TYPE_CONVERSION (result);
493 if (valid_gimple_rhs_p (result))
494 return result;
496 break;
498 case GIMPLE_TERNARY_RHS:
499 /* Try to fold a conditional expression. */
500 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
502 tree op0 = gimple_assign_rhs1 (stmt);
503 tree tem;
504 bool set = false;
505 location_t cond_loc = gimple_location (stmt);
507 if (COMPARISON_CLASS_P (op0))
509 fold_defer_overflow_warnings ();
510 tem = fold_binary_loc (cond_loc,
511 TREE_CODE (op0), TREE_TYPE (op0),
512 TREE_OPERAND (op0, 0),
513 TREE_OPERAND (op0, 1));
514 /* This is actually a conditional expression, not a GIMPLE
515 conditional statement, however, the valid_gimple_rhs_p
516 test still applies. */
517 set = (tem && is_gimple_condexpr (tem)
518 && valid_gimple_rhs_p (tem));
519 fold_undefer_overflow_warnings (set, stmt, 0);
521 else if (is_gimple_min_invariant (op0))
523 tem = op0;
524 set = true;
526 else
527 return NULL_TREE;
529 if (set)
530 result = fold_build3_loc (cond_loc, COND_EXPR,
531 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
532 gimple_assign_rhs2 (stmt),
533 gimple_assign_rhs3 (stmt));
536 if (!result)
537 result = fold_ternary_loc (loc, subcode,
538 TREE_TYPE (gimple_assign_lhs (stmt)),
539 gimple_assign_rhs1 (stmt),
540 gimple_assign_rhs2 (stmt),
541 gimple_assign_rhs3 (stmt));
543 if (result)
545 STRIP_USELESS_TYPE_CONVERSION (result);
546 if (valid_gimple_rhs_p (result))
547 return result;
549 break;
551 case GIMPLE_INVALID_RHS:
552 gcc_unreachable ();
555 return NULL_TREE;
558 /* Attempt to fold a conditional statement. Return true if any changes were
559 made. We only attempt to fold the condition expression, and do not perform
560 any transformation that would require alteration of the cfg. It is
561 assumed that the operands have been previously folded. */
563 static bool
564 fold_gimple_cond (gcond *stmt)
566 tree result = fold_binary_loc (gimple_location (stmt),
567 gimple_cond_code (stmt),
568 boolean_type_node,
569 gimple_cond_lhs (stmt),
570 gimple_cond_rhs (stmt));
572 if (result)
574 STRIP_USELESS_TYPE_CONVERSION (result);
575 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
577 gimple_cond_set_condition_from_tree (stmt, result);
578 return true;
582 return false;
586 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
587 adjusting the replacement stmts' location and virtual operands.
588 If the statement has a lhs the last stmt in the sequence is expected
589 to assign to that lhs. */
591 static void
592 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
594 gimple stmt = gsi_stmt (*si_p);
596 if (gimple_has_location (stmt))
597 annotate_all_with_location (stmts, gimple_location (stmt));
599 /* First iterate over the replacement statements backward, assigning
600 virtual operands to their defining statements. */
601 gimple laststore = NULL;
602 for (gimple_stmt_iterator i = gsi_last (stmts);
603 !gsi_end_p (i); gsi_prev (&i))
605 gimple new_stmt = gsi_stmt (i);
606 if ((gimple_assign_single_p (new_stmt)
607 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
608 || (is_gimple_call (new_stmt)
609 && (gimple_call_flags (new_stmt)
610 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
612 tree vdef;
613 if (!laststore)
614 vdef = gimple_vdef (stmt);
615 else
616 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
617 gimple_set_vdef (new_stmt, vdef);
618 if (vdef && TREE_CODE (vdef) == SSA_NAME)
619 SSA_NAME_DEF_STMT (vdef) = new_stmt;
620 laststore = new_stmt;
624 /* Second iterate over the statements forward, assigning virtual
625 operands to their uses. */
626 tree reaching_vuse = gimple_vuse (stmt);
627 for (gimple_stmt_iterator i = gsi_start (stmts);
628 !gsi_end_p (i); gsi_next (&i))
630 gimple new_stmt = gsi_stmt (i);
631 /* If the new statement possibly has a VUSE, update it with the exact SSA
632 name we know will reach this one. */
633 if (gimple_has_mem_ops (new_stmt))
634 gimple_set_vuse (new_stmt, reaching_vuse);
635 gimple_set_modified (new_stmt, true);
636 if (gimple_vdef (new_stmt))
637 reaching_vuse = gimple_vdef (new_stmt);
640 /* If the new sequence does not do a store, release the virtual
641 definition of the original statement. */
642 if (reaching_vuse
643 && reaching_vuse == gimple_vuse (stmt))
645 tree vdef = gimple_vdef (stmt);
646 if (vdef
647 && TREE_CODE (vdef) == SSA_NAME)
649 unlink_stmt_vdef (stmt);
650 release_ssa_name (vdef);
654 /* Finally replace the original statement with the sequence. */
655 gsi_replace_with_seq (si_p, stmts, false);
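
/* A sketch of the rethreading above (assumed example, SSA names
   invented for illustration): replacing one store with VUSE .MEM_1 and
   VDEF .MEM_2 by a two-store sequence yields

     # .MEM_3 = VDEF <.MEM_1>       new intermediate SSA name
     *p_4 = c1;
     # .MEM_2 = VDEF <.MEM_3>       reuses the original VDEF
     *(p_4 + 1) = c2;

   so the first VUSE and the last VDEF match the replaced statement.  */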
658 /* Convert EXPR into a GIMPLE value suitable for substitution on the
659 RHS of an assignment. Insert the necessary statements before
660 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL,
661 is replaced. If the call is expected to produce a result, then it
662 is replaced by an assignment of the new RHS to the result variable.
663 If the result is to be ignored, then the call is replaced by a
664 GIMPLE_NOP. A proper VDEF chain is retained by making the first
665 VUSE and the last VDEF of the whole sequence be the same as the replaced
666 statement and using new SSA names for stores in between. */
668 void
669 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
671 tree lhs;
672 gimple stmt, new_stmt;
673 gimple_stmt_iterator i;
674 gimple_seq stmts = NULL;
676 stmt = gsi_stmt (*si_p);
678 gcc_assert (is_gimple_call (stmt));
680 push_gimplify_context (gimple_in_ssa_p (cfun));
682 lhs = gimple_call_lhs (stmt);
683 if (lhs == NULL_TREE)
685 gimplify_and_add (expr, &stmts);
686 /* We can end up with folding a memcpy of an empty class assignment
687 which gets optimized away by C++ gimplification. */
688 if (gimple_seq_empty_p (stmts))
690 pop_gimplify_context (NULL);
691 if (gimple_in_ssa_p (cfun))
693 unlink_stmt_vdef (stmt);
694 release_defs (stmt);
696 gsi_replace (si_p, gimple_build_nop (), true);
697 return;
700 else
702 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
703 new_stmt = gimple_build_assign (lhs, tmp);
704 i = gsi_last (stmts);
705 gsi_insert_after_without_update (&i, new_stmt,
706 GSI_CONTINUE_LINKING);
709 pop_gimplify_context (NULL);
711 gsi_replace_with_seq_vops (si_p, stmts);
715 /* Replace the call at *GSI with the gimple value VAL. */
717 static void
718 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
720 gimple stmt = gsi_stmt (*gsi);
721 tree lhs = gimple_call_lhs (stmt);
722 gimple repl;
723 if (lhs)
725 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
726 val = fold_convert (TREE_TYPE (lhs), val);
727 repl = gimple_build_assign (lhs, val);
729 else
730 repl = gimple_build_nop ();
731 tree vdef = gimple_vdef (stmt);
732 if (vdef && TREE_CODE (vdef) == SSA_NAME)
734 unlink_stmt_vdef (stmt);
735 release_ssa_name (vdef);
737 gsi_replace (gsi, repl, true);
740 /* Replace the call at *GSI with the new call REPL and fold that
741 again. */
743 static void
744 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
746 gimple stmt = gsi_stmt (*gsi);
747 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
748 gimple_set_location (repl, gimple_location (stmt));
749 if (gimple_vdef (stmt)
750 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
752 gimple_set_vdef (repl, gimple_vdef (stmt));
753 gimple_set_vuse (repl, gimple_vuse (stmt));
754 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
756 gsi_replace (gsi, repl, true);
757 fold_stmt (gsi);
760 /* Return true if VAR is a VAR_DECL or a component thereof. */
762 static bool
763 var_decl_component_p (tree var)
765 tree inner = var;
766 while (handled_component_p (inner))
767 inner = TREE_OPERAND (inner, 0);
768 return SSA_VAR_P (inner);
771 /* Fold function call to builtin mem{{,p}cpy,move}. Return
772 false if no simplification can be made.
773 If ENDP is 0, return DEST (like memcpy).
774 If ENDP is 1, return DEST+LEN (like mempcpy).
775 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
776 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
777 (memmove). */
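
/* A summary table of the ENDP encoding above (added illustration):

     memcpy (d, s, n)     ENDP 0: result is  d
     mempcpy (d, s, n)    ENDP 1: result is  d + n
     stpcpy (d, s)        ENDP 2: result is  d + n - 1  (n counts the NUL)
     memmove (d, s, n)    ENDP 3: result is  d, and [s, s+n) may overlap
                                  [d, d+n)  */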
779 static bool
780 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
781 tree dest, tree src, int endp)
783 gimple stmt = gsi_stmt (*gsi);
784 tree lhs = gimple_call_lhs (stmt);
785 tree len = gimple_call_arg (stmt, 2);
786 tree destvar, srcvar;
787 location_t loc = gimple_location (stmt);
789 /* If the LEN parameter is zero, return DEST. */
790 if (integer_zerop (len))
792 gimple repl;
793 if (gimple_call_lhs (stmt))
794 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
795 else
796 repl = gimple_build_nop ();
797 tree vdef = gimple_vdef (stmt);
798 if (vdef && TREE_CODE (vdef) == SSA_NAME)
800 unlink_stmt_vdef (stmt);
801 release_ssa_name (vdef);
803 gsi_replace (gsi, repl, true);
804 return true;
807 /* If SRC and DEST are the same (and not volatile), return
808 DEST{,+LEN,+LEN-1}. */
809 if (operand_equal_p (src, dest, 0))
811 unlink_stmt_vdef (stmt);
812 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
813 release_ssa_name (gimple_vdef (stmt));
814 if (!lhs)
816 gsi_replace (gsi, gimple_build_nop (), true);
817 return true;
819 goto done;
821 else
823 tree srctype, desttype;
824 unsigned int src_align, dest_align;
825 tree off0;
827 /* Build accesses at offset zero with a ref-all character type. */
828 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
829 ptr_mode, true), 0);
831 /* If we can perform the copy efficiently by first doing all loads
832 and then all stores, inline it that way. Currently "efficiently"
833 means that we can load all the memory into a single integer
834 register, which is what MOVE_MAX gives us. */
835 src_align = get_pointer_alignment (src);
836 dest_align = get_pointer_alignment (dest);
837 if (tree_fits_uhwi_p (len)
838 && compare_tree_int (len, MOVE_MAX) <= 0
839 /* ??? Don't transform copies from strings with known length; this
840 confuses tree-ssa-strlen.c. This doesn't handle
841 the case in gcc.dg/strlenopt-8.c, which is XFAILed for that
842 reason. */
843 && !c_strlen (src, 2))
845 unsigned ilen = tree_to_uhwi (len);
846 if (exact_log2 (ilen) != -1)
848 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
849 if (type
850 && TYPE_MODE (type) != BLKmode
851 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
852 == ilen * 8)
853 /* If the destination pointer is not aligned we must be able
854 to emit an unaligned store. */
855 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
856 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
858 tree srctype = type;
859 tree desttype = type;
860 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
861 srctype = build_aligned_type (type, src_align);
862 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
863 tree tem = fold_const_aggregate_ref (srcmem);
864 if (tem)
865 srcmem = tem;
866 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
867 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
868 src_align))
869 srcmem = NULL_TREE;
870 if (srcmem)
872 gimple new_stmt;
873 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
875 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
876 if (gimple_in_ssa_p (cfun))
877 srcmem = make_ssa_name (TREE_TYPE (srcmem),
878 new_stmt);
879 else
880 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
881 gimple_assign_set_lhs (new_stmt, srcmem);
882 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
883 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
885 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
886 desttype = build_aligned_type (type, dest_align);
887 new_stmt
888 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
889 dest, off0),
890 srcmem);
891 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
892 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
893 if (gimple_vdef (new_stmt)
894 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
895 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
896 if (!lhs)
898 gsi_replace (gsi, new_stmt, true);
899 return true;
901 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
902 goto done;
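
/* A minimal sketch of the single-register copy above, assuming 4-byte
   ints and sufficiently aligned operands (added illustration):

     memcpy (&a, &b, 4);

   becomes a plain load/store pair through a 4-byte integer type

     _1 = MEM[(char * {ref-all})&b];
     MEM[(char * {ref-all})&a] = _1;

   which later passes can treat like any other assignment.  */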
908 if (endp == 3)
910 /* Both DEST and SRC must have pointer types.
911 ??? This is what old code did. Is the testing for pointer types
912 really mandatory?
914 If either SRC is readonly or length is 1, we can use memcpy. */
915 if (!dest_align || !src_align)
916 return false;
917 if (readonly_data_expr (src)
918 || (tree_fits_uhwi_p (len)
919 && (MIN (src_align, dest_align) / BITS_PER_UNIT
920 >= tree_to_uhwi (len))))
922 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
923 if (!fn)
924 return false;
925 gimple_call_set_fndecl (stmt, fn);
926 gimple_call_set_arg (stmt, 0, dest);
927 gimple_call_set_arg (stmt, 1, src);
928 fold_stmt (gsi);
929 return true;
932 /* If *src and *dest can't overlap, optimize into memcpy as well. */
933 if (TREE_CODE (src) == ADDR_EXPR
934 && TREE_CODE (dest) == ADDR_EXPR)
936 tree src_base, dest_base, fn;
937 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
938 HOST_WIDE_INT size = -1;
939 HOST_WIDE_INT maxsize = -1;
940 bool reverse;
942 srcvar = TREE_OPERAND (src, 0);
943 src_base = get_ref_base_and_extent (srcvar, &src_offset,
944 &size, &maxsize, &reverse);
945 destvar = TREE_OPERAND (dest, 0);
946 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
947 &size, &maxsize, &reverse);
948 if (tree_fits_uhwi_p (len))
949 maxsize = tree_to_uhwi (len);
950 else
951 maxsize = -1;
952 src_offset /= BITS_PER_UNIT;
953 dest_offset /= BITS_PER_UNIT;
954 if (SSA_VAR_P (src_base)
955 && SSA_VAR_P (dest_base))
957 if (operand_equal_p (src_base, dest_base, 0)
958 && ranges_overlap_p (src_offset, maxsize,
959 dest_offset, maxsize))
960 return false;
962 else if (TREE_CODE (src_base) == MEM_REF
963 && TREE_CODE (dest_base) == MEM_REF)
965 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
966 TREE_OPERAND (dest_base, 0), 0))
967 return false;
968 offset_int off = mem_ref_offset (src_base) + src_offset;
969 if (!wi::fits_shwi_p (off))
970 return false;
971 src_offset = off.to_shwi ();
973 off = mem_ref_offset (dest_base) + dest_offset;
974 if (!wi::fits_shwi_p (off))
975 return false;
976 dest_offset = off.to_shwi ();
977 if (ranges_overlap_p (src_offset, maxsize,
978 dest_offset, maxsize))
979 return false;
981 else
982 return false;
984 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
985 if (!fn)
986 return false;
987 gimple_call_set_fndecl (stmt, fn);
988 gimple_call_set_arg (stmt, 0, dest);
989 gimple_call_set_arg (stmt, 1, src);
990 fold_stmt (gsi);
991 return true;
994 /* If the destination and source do not alias optimize into
995 memcpy as well. */
996 if ((is_gimple_min_invariant (dest)
997 || TREE_CODE (dest) == SSA_NAME)
998 && (is_gimple_min_invariant (src)
999 || TREE_CODE (src) == SSA_NAME))
1001 ao_ref destr, srcr;
1002 ao_ref_init_from_ptr_and_size (&destr, dest, len);
1003 ao_ref_init_from_ptr_and_size (&srcr, src, len);
1004 if (!refs_may_alias_p_1 (&destr, &srcr, false))
1006 tree fn;
1007 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1008 if (!fn)
1009 return false;
1010 gimple_call_set_fndecl (stmt, fn);
1011 gimple_call_set_arg (stmt, 0, dest);
1012 gimple_call_set_arg (stmt, 1, src);
1013 fold_stmt (gsi);
1014 return true;
1018 return false;
1021 if (!tree_fits_shwi_p (len))
1022 return false;
1023 /* FIXME:
1024 This logic loses for arguments like (type *)malloc (sizeof (type)),
1025 since we strip the casts up to the VOID return value from malloc.
1026 Perhaps we ought to inherit the type from a non-VOID argument here? */
1027 STRIP_NOPS (src);
1028 STRIP_NOPS (dest);
1029 if (!POINTER_TYPE_P (TREE_TYPE (src))
1030 || !POINTER_TYPE_P (TREE_TYPE (dest)))
1031 return false;
1032 /* In the following, try to find a type that is most natural to be
1033 used for the memcpy source and destination and that allows
1034 the most optimization when memcpy is turned into a plain assignment
1035 using that type. In theory we could always use a char[len] type,
1036 but that gains us only that the destination and source possibly
1037 will no longer have their address taken. */
1038 /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here. */
1039 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
1041 tree tem = TREE_OPERAND (src, 0);
1042 STRIP_NOPS (tem);
1043 if (tem != TREE_OPERAND (src, 0))
1044 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
1046 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
1048 tree tem = TREE_OPERAND (dest, 0);
1049 STRIP_NOPS (tem);
1050 if (tem != TREE_OPERAND (dest, 0))
1051 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
1053 srctype = TREE_TYPE (TREE_TYPE (src));
1054 if (TREE_CODE (srctype) == ARRAY_TYPE
1055 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1057 srctype = TREE_TYPE (srctype);
1058 STRIP_NOPS (src);
1059 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
1061 desttype = TREE_TYPE (TREE_TYPE (dest));
1062 if (TREE_CODE (desttype) == ARRAY_TYPE
1063 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1065 desttype = TREE_TYPE (desttype);
1066 STRIP_NOPS (dest);
1067 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
1069 if (TREE_ADDRESSABLE (srctype)
1070 || TREE_ADDRESSABLE (desttype))
1071 return false;
1073 /* Make sure we are not copying using a floating-point mode or
1074 a type whose size possibly does not match its precision. */
1075 if (FLOAT_MODE_P (TYPE_MODE (desttype))
1076 || TREE_CODE (desttype) == BOOLEAN_TYPE
1077 || TREE_CODE (desttype) == ENUMERAL_TYPE)
1078 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
1079 if (FLOAT_MODE_P (TYPE_MODE (srctype))
1080 || TREE_CODE (srctype) == BOOLEAN_TYPE
1081 || TREE_CODE (srctype) == ENUMERAL_TYPE)
1082 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
1083 if (!srctype)
1084 srctype = desttype;
1085 if (!desttype)
1086 desttype = srctype;
1087 if (!srctype)
1088 return false;
1090 src_align = get_pointer_alignment (src);
1091 dest_align = get_pointer_alignment (dest);
1092 if (dest_align < TYPE_ALIGN (desttype)
1093 || src_align < TYPE_ALIGN (srctype))
1094 return false;
1096 destvar = dest;
1097 STRIP_NOPS (destvar);
1098 if (TREE_CODE (destvar) == ADDR_EXPR
1099 && var_decl_component_p (TREE_OPERAND (destvar, 0))
1100 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1101 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
1102 else
1103 destvar = NULL_TREE;
1105 srcvar = src;
1106 STRIP_NOPS (srcvar);
1107 if (TREE_CODE (srcvar) == ADDR_EXPR
1108 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1109 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1111 if (!destvar
1112 || src_align >= TYPE_ALIGN (desttype))
1113 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1114 srcvar, off0);
1115 else if (!STRICT_ALIGNMENT)
1117 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1118 src_align);
1119 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1121 else
1122 srcvar = NULL_TREE;
1124 else
1125 srcvar = NULL_TREE;
1127 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1128 return false;
1130 if (srcvar == NULL_TREE)
1132 STRIP_NOPS (src);
1133 if (src_align >= TYPE_ALIGN (desttype))
1134 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1135 else
1137 if (STRICT_ALIGNMENT)
1138 return false;
1139 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1140 src_align);
1141 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1144 else if (destvar == NULL_TREE)
1146 STRIP_NOPS (dest);
1147 if (dest_align >= TYPE_ALIGN (srctype))
1148 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1149 else
1151 if (STRICT_ALIGNMENT)
1152 return false;
1153 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1154 dest_align);
1155 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1159 gimple new_stmt;
1160 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1162 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1163 if (gimple_in_ssa_p (cfun))
1164 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1165 else
1166 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
1167 gimple_assign_set_lhs (new_stmt, srcvar);
1168 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1169 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1171 new_stmt = gimple_build_assign (destvar, srcvar);
1172 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1173 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1174 if (gimple_vdef (new_stmt)
1175 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1176 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1177 if (!lhs)
1179 gsi_replace (gsi, new_stmt, true);
1180 return true;
1182 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1185 done:
1186 if (endp == 0 || endp == 3)
1187 len = NULL_TREE;
1188 else if (endp == 2)
1189 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
1190 ssize_int (1));
1191 if (endp == 2 || endp == 1)
1192 dest = fold_build_pointer_plus_loc (loc, dest, len);
1194 dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
1195 GSI_SAME_STMT);
1196 gimple repl = gimple_build_assign (lhs, dest);
1197 gsi_replace (gsi, repl, true);
1198 return true;
1201 /* Fold a function call to builtin memset or bzero at *GSI, setting the
1202 memory of size LEN to the value C. Return whether a simplification was made. */
1204 static bool
1205 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1207 gimple stmt = gsi_stmt (*gsi);
1208 tree etype;
1209 unsigned HOST_WIDE_INT length, cval;
1211 /* If the LEN parameter is zero, return DEST. */
1212 if (integer_zerop (len))
1214 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1215 return true;
1218 if (! tree_fits_uhwi_p (len))
1219 return false;
1221 if (TREE_CODE (c) != INTEGER_CST)
1222 return false;
1224 tree dest = gimple_call_arg (stmt, 0);
1225 tree var = dest;
1226 if (TREE_CODE (var) != ADDR_EXPR)
1227 return false;
1229 var = TREE_OPERAND (var, 0);
1230 if (TREE_THIS_VOLATILE (var))
1231 return false;
1233 etype = TREE_TYPE (var);
1234 if (TREE_CODE (etype) == ARRAY_TYPE)
1235 etype = TREE_TYPE (etype);
1237 if (!INTEGRAL_TYPE_P (etype)
1238 && !POINTER_TYPE_P (etype))
1239 return false;
1241 if (! var_decl_component_p (var))
1242 return false;
1244 length = tree_to_uhwi (len);
1245 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1246 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1247 return false;
1249 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1250 return false;
1252 if (integer_zerop (c))
1253 cval = 0;
1254 else
1256 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1257 return false;
1259 cval = TREE_INT_CST_LOW (c);
1260 cval &= 0xff;
1261 cval |= cval << 8;
1262 cval |= cval << 16;
1263 cval |= (cval << 31) << 1;
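
/* An illustration of the replication above (added commentary): for
   C = 0xab and a 64-bit unsigned HOST_WIDE_INT, cval evolves as

     0x00000000000000ab
     0x000000000000abab     after  cval |= cval << 8
     0x00000000abababab     after  cval |= cval << 16
     0xabababababababab     after  cval |= (cval << 31) << 1

   where the last shift is split in two so it stays well defined even
   when HOST_WIDE_INT is only 32 bits wide.  */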
1266 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1267 gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1268 gimple_set_vuse (store, gimple_vuse (stmt));
1269 tree vdef = gimple_vdef (stmt);
1270 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1272 gimple_set_vdef (store, gimple_vdef (stmt));
1273 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1275 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1276 if (gimple_call_lhs (stmt))
1278 gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1279 gsi_replace (gsi, asgn, true);
1281 else
1283 gimple_stmt_iterator gsi2 = *gsi;
1284 gsi_prev (gsi);
1285 gsi_remove (&gsi2, true);
1288 return true;
1292 /* Return in LENGTH the string length, the maximum string length, or
1293 the maximum value of ARG.
1294 If ARG is an SSA name variable, follow its use-def chains. Return
1295 false if LENGTH is not NULL and, for TYPE == 0, its value is not equal
1296 to the length we determine, or if we are unable to determine the
1297 length or value. VISITED is a bitmap of visited variables.
1298 TYPE is 0 if the string length should be returned, 1 for the maximum
1299 string length and 2 for the maximum value ARG can have. */
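
/* Minimal examples of the three TYPE modes (assumed inputs, added
   illustration):

     ARG = "abc"                 TYPE 0: *LENGTH = 3  (exact strlen)
     ARG = PHI <"ab", "wxyz">    TYPE 1: *LENGTH = 4  (maximum strlen)
     ARG = 7                     TYPE 2: *LENGTH = 7  (maximum value)  */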
1301 static bool
1302 get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
1304 tree var, val;
1305 gimple def_stmt;
1307 if (TREE_CODE (arg) != SSA_NAME)
1309 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1310 if (TREE_CODE (arg) == ADDR_EXPR
1311 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1312 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1314 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1315 if (TREE_CODE (aop0) == INDIRECT_REF
1316 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1317 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1318 length, visited, type);
1321 if (type == 2)
1323 val = arg;
1324 if (TREE_CODE (val) != INTEGER_CST
1325 || tree_int_cst_sgn (val) < 0)
1326 return false;
1328 else
1329 val = c_strlen (arg, 1);
1330 if (!val)
1331 return false;
1333 if (*length)
1335 if (type > 0)
1337 if (TREE_CODE (*length) != INTEGER_CST
1338 || TREE_CODE (val) != INTEGER_CST)
1339 return false;
1341 if (tree_int_cst_lt (*length, val))
1342 *length = val;
1343 return true;
1345 else if (simple_cst_equal (val, *length) != 1)
1346 return false;
1349 *length = val;
1350 return true;
1353 /* If ARG is registered for SSA update we cannot look at its defining
1354 statement. */
1355 if (name_registered_for_update_p (arg))
1356 return false;
1358 /* If we were already here, break the infinite cycle. */
1359 if (!*visited)
1360 *visited = BITMAP_ALLOC (NULL);
1361 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1362 return true;
1364 var = arg;
1365 def_stmt = SSA_NAME_DEF_STMT (var);
1367 switch (gimple_code (def_stmt))
1369 case GIMPLE_ASSIGN:
1370 /* The RHS of the statement defining VAR must either have a
1371 constant length or come from another SSA_NAME with a constant
1372 length. */
1373 if (gimple_assign_single_p (def_stmt)
1374 || gimple_assign_unary_nop_p (def_stmt))
1376 tree rhs = gimple_assign_rhs1 (def_stmt);
1377 return get_maxval_strlen (rhs, length, visited, type);
1379 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1381 tree op2 = gimple_assign_rhs2 (def_stmt);
1382 tree op3 = gimple_assign_rhs3 (def_stmt);
1383 return get_maxval_strlen (op2, length, visited, type)
1384 && get_maxval_strlen (op3, length, visited, type);
1386 return false;
1388 case GIMPLE_PHI:
1390 /* All the arguments of the PHI node must have the same constant
1391 length. */
1392 unsigned i;
1394 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1396 tree arg = gimple_phi_arg (def_stmt, i)->def;
1398 /* If this PHI has itself as an argument, we cannot
1399 determine the string length of this argument. However,
1400 if we can find a constant string length for the other
1401 PHI args then we can still be sure that this is a
1402 constant string length. So be optimistic and just
1403 continue with the next argument. */
1404 if (arg == gimple_phi_result (def_stmt))
1405 continue;
1407 if (!get_maxval_strlen (arg, length, visited, type))
1408 return false;
1411 return true;
1413 default:
1414 return false;
1418 tree
1419 get_maxval_strlen (tree arg, int type)
1421 bitmap visited = NULL;
1422 tree len = NULL_TREE;
1423 if (!get_maxval_strlen (arg, &len, &visited, type))
1424 len = NULL_TREE;
1425 if (visited)
1426 BITMAP_FREE (visited);
1428 return len;
1432 /* Fold a function call to builtin strcpy with arguments DEST and SRC.
1433 Return false if no simplification can be made; otherwise replace
1434 the call and return true. */
1436 static bool
1437 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1438 tree dest, tree src)
1440 location_t loc = gimple_location (gsi_stmt (*gsi));
1441 tree fn;
1443 /* If SRC and DEST are the same (and not volatile), return DEST. */
1444 if (operand_equal_p (src, dest, 0))
1446 replace_call_with_value (gsi, dest);
1447 return true;
1450 if (optimize_function_for_size_p (cfun))
1451 return false;
1453 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1454 if (!fn)
1455 return false;
1457 tree len = get_maxval_strlen (src, 0);
1458 if (!len)
1459 return false;
1461 len = fold_convert_loc (loc, size_type_node, len);
1462 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1463 len = force_gimple_operand_gsi (gsi, len, true,
1464 NULL_TREE, true, GSI_SAME_STMT);
1465 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1466 replace_call_with_call_and_fold (gsi, repl);
1467 return true;
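
/* A sketch of the transformation above (added illustration): with a
   known source length,

     strcpy (dst, "abc");

   becomes

     memcpy (dst, "abc", 4);     three characters plus the trailing NUL

   and is done only when not optimizing for size.  */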
1470 /* Fold a function call to builtin strncpy with arguments DEST, SRC, and LEN.
1471 Return false if no simplification can be made; otherwise replace
1472 the call and return true. */
1474 static bool
1475 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1476 tree dest, tree src, tree len)
1478 location_t loc = gimple_location (gsi_stmt (*gsi));
1479 tree fn;
1481 /* If the LEN parameter is zero, return DEST. */
1482 if (integer_zerop (len))
1484 replace_call_with_value (gsi, dest);
1485 return true;
1488 /* We can't compare slen with len as constants below if len is not a
1489 constant. */
1490 if (TREE_CODE (len) != INTEGER_CST)
1491 return false;
1493 /* Now, we must be passed a constant src ptr parameter. */
1494 tree slen = get_maxval_strlen (src, 0);
1495 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1496 return false;
1498 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1500 /* We do not support simplification of this case, though we do
1501 support it when expanding trees into RTL. */
1502 /* FIXME: generate a call to __builtin_memset. */
1503 if (tree_int_cst_lt (slen, len))
1504 return false;
1506 /* OK transform into builtin memcpy. */
1507 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1508 if (!fn)
1509 return false;
1511 len = fold_convert_loc (loc, size_type_node, len);
1512 len = force_gimple_operand_gsi (gsi, len, true,
1513 NULL_TREE, true, GSI_SAME_STMT);
1514 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1515 replace_call_with_call_and_fold (gsi, repl);
1516 return true;
1519 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1520 to the call.
1522 Return false if no simplification was possible; otherwise replace the
1523 call at *GSI and return true.
1525 The simplified form may be a constant or other expression which
1526 computes the same value, but in a more efficient manner (including
1527 calls to other builtin functions). */
1537 static bool
1538 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1540 gimple stmt = gsi_stmt (*gsi);
1541 location_t loc = gimple_location (stmt);
1543 const char *p = c_getstr (src);
1545 /* If the string length is zero, return the dst parameter. */
1546 if (p && *p == '\0')
1548 replace_call_with_value (gsi, dst);
1549 return true;
1552 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1553 return false;
1555 /* See if we can store by pieces into (dst + strlen(dst)). */
1556 tree newdst;
1557 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1558 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1560 if (!strlen_fn || !memcpy_fn)
1561 return false;
1563 /* If the length of the source string isn't computable don't
1564 split strcat into strlen and memcpy. */
1565 tree len = get_maxval_strlen (src, 0);
1566 if (! len)
1567 return false;
1569 /* Create strlen (dst). */
1570 gimple_seq stmts = NULL, stmts2;
1571 gimple repl = gimple_build_call (strlen_fn, 1, dst);
1572 gimple_set_location (repl, loc);
1573 if (gimple_in_ssa_p (cfun))
1574 newdst = make_ssa_name (size_type_node);
1575 else
1576 newdst = create_tmp_reg (size_type_node);
1577 gimple_call_set_lhs (repl, newdst);
1578 gimple_seq_add_stmt_without_update (&stmts, repl);
1580 /* Create (dst p+ strlen (dst)). */
1581 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1582 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1583 gimple_seq_add_seq_without_update (&stmts, stmts2);
1585 len = fold_convert_loc (loc, size_type_node, len);
1586 len = size_binop_loc (loc, PLUS_EXPR, len,
1587 build_int_cst (size_type_node, 1));
1588 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1589 gimple_seq_add_seq_without_update (&stmts, stmts2);
1591 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1592 gimple_seq_add_stmt_without_update (&stmts, repl);
1593 if (gimple_call_lhs (stmt))
1595 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1596 gimple_seq_add_stmt_without_update (&stmts, repl);
1597 gsi_replace_with_seq_vops (gsi, stmts);
1598 /* gsi now points at the assignment to the lhs, get a
1599 stmt iterator to the memcpy call.
1600 ??? We can't use gsi_for_stmt as that doesn't work when the
1601 CFG isn't built yet. */
1602 gimple_stmt_iterator gsi2 = *gsi;
1603 gsi_prev (&gsi2);
1604 fold_stmt (&gsi2);
1606 else
1608 gsi_replace_with_seq_vops (gsi, stmts);
1609 fold_stmt (gsi);
1611 return true;
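
/* A sketch of the strcat split above (added illustration, SSA names
   invented): with strlen (src) known to be 3,

     strcat (dst, src);

   becomes

     _1 = strlen (dst);
     _2 = dst + _1;
     memcpy (_2, src, 4);        three characters plus the trailing NUL  */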
1614 /* Fold a call to the __strcat_chk builtin. DEST, SRC, and SIZE
1615 are the arguments to the call. */
1617 static bool
1618 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1620 gimple stmt = gsi_stmt (*gsi);
1621 tree dest = gimple_call_arg (stmt, 0);
1622 tree src = gimple_call_arg (stmt, 1);
1623 tree size = gimple_call_arg (stmt, 2);
1624 tree fn;
1625 const char *p;
1628 p = c_getstr (src);
1629 /* If the SRC parameter is "", return DEST. */
1630 if (p && *p == '\0')
1632 replace_call_with_value (gsi, dest);
1633 return true;
1636 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1637 return false;
1639 /* If __builtin_strcat_chk is used, assume strcat is available. */
1640 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1641 if (!fn)
1642 return false;
1644 gimple repl = gimple_build_call (fn, 2, dest, src);
1645 replace_call_with_call_and_fold (gsi, repl);
1646 return true;
1649 /* Simplify a call to the strncat builtin. */
1651 static bool
1652 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1654 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1655 tree dst = gimple_call_arg (stmt, 0);
1656 tree src = gimple_call_arg (stmt, 1);
1657 tree len = gimple_call_arg (stmt, 2);
1659 const char *p = c_getstr (src);
1661 /* If the requested length is zero, or the src parameter string
1662 length is zero, return the dst parameter. */
1663 if (integer_zerop (len) || (p && *p == '\0'))
1665 replace_call_with_value (gsi, dst);
1666 return true;
1669 /* If the requested len is greater than or equal to the string
1670 length, call strcat. */
1671 if (TREE_CODE (len) == INTEGER_CST && p
1672 && compare_tree_int (len, strlen (p)) >= 0)
1674 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1676 /* If the replacement _DECL isn't initialized, don't do the
1677 transformation. */
1678 if (!fn)
1679 return false;
1681 gcall *repl = gimple_build_call (fn, 2, dst, src);
1682 replace_call_with_call_and_fold (gsi, repl);
1683 return true;
1686 return false;
1689 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1690 LEN, and SIZE. */
1692 static bool
1693 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1695 gimple stmt = gsi_stmt (*gsi);
1696 tree dest = gimple_call_arg (stmt, 0);
1697 tree src = gimple_call_arg (stmt, 1);
1698 tree len = gimple_call_arg (stmt, 2);
1699 tree size = gimple_call_arg (stmt, 3);
1700 tree fn;
1701 const char *p;
1703 p = c_getstr (src);
1704 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1705 if ((p && *p == '\0')
1706 || integer_zerop (len))
1708 replace_call_with_value (gsi, dest);
1709 return true;
1712 if (! tree_fits_uhwi_p (size))
1713 return false;
1715 if (! integer_all_onesp (size))
1717 tree src_len = c_strlen (src, 1);
1718 if (src_len
1719 && tree_fits_uhwi_p (src_len)
1720 && tree_fits_uhwi_p (len)
1721 && ! tree_int_cst_lt (len, src_len))
1723 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1724 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1725 if (!fn)
1726 return false;
1728 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1729 replace_call_with_call_and_fold (gsi, repl);
1730 return true;
1732 return false;
1735 /* If __builtin_strncat_chk is used, assume strncat is available. */
1736 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1737 if (!fn)
1738 return false;
1740 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1741 replace_call_with_call_and_fold (gsi, repl);
1742 return true;
1745 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1746 to the call. UNLOCKED is true if this is actually a call to
1747 fputs_unlocked. The transformation is not done if the return
1748 value of the call is used; it relies on the known length of the
1749 string when one can be computed. Return false if no simplification
1750 was possible; otherwise replace the call and return true. */
1752 static bool
1753 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
1754 tree arg0, tree arg1,
1755 bool unlocked)
1757 gimple stmt = gsi_stmt (*gsi);
1759 /* If we're using an unlocked function, assume the other unlocked
1760 functions exist explicitly. */
1761 tree const fn_fputc = (unlocked
1762 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1763 : builtin_decl_implicit (BUILT_IN_FPUTC));
1764 tree const fn_fwrite = (unlocked
1765 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1766 : builtin_decl_implicit (BUILT_IN_FWRITE));
1768 /* If the return value is used, don't do the transformation. */
1769 if (gimple_call_lhs (stmt))
1770 return false;
1772 /* Get the length of the string passed to fputs. If the length
1773 can't be determined, punt. */
1774 tree len = get_maxval_strlen (arg0, 0);
1775 if (!len
1776 || TREE_CODE (len) != INTEGER_CST)
1777 return false;
1779 switch (compare_tree_int (len, 1))
1781 case -1: /* length is 0, delete the call entirely. */
1782 replace_call_with_value (gsi, integer_zero_node);
1783 return true;
1785 case 0: /* length is 1, call fputc. */
1787 const char *p = c_getstr (arg0);
1788 if (p != NULL)
1790 if (!fn_fputc)
1791 return false;
1793 gimple repl = gimple_build_call (fn_fputc, 2,
1794 build_int_cst
1795 (integer_type_node, p[0]), arg1);
1796 replace_call_with_call_and_fold (gsi, repl);
1797 return true;
1800 /* FALLTHROUGH */
1801 case 1: /* length is greater than 1, call fwrite. */
1803 /* If optimizing for size, keep fputs. */
1804 if (optimize_function_for_size_p (cfun))
1805 return false;
1806 /* New argument list transforming fputs(string, stream) to
1807 fwrite(string, 1, len, stream). */
1808 if (!fn_fwrite)
1809 return false;
1811 gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
1812 size_one_node, len, arg1);
1813 replace_call_with_call_and_fold (gsi, repl);
1814 return true;
1816 default:
1817 gcc_unreachable ();
1819 return false;
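
/* Minimal examples of the three cases above (added illustration,
   return value assumed unused):

     fputs ("", f);      call removed entirely
     fputs ("x", f);     becomes  fputc ('x', f)
     fputs ("xy", f);    becomes  fwrite ("xy", 1, 2, f)   (speed only)  */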
1822 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1823 DEST, SRC, LEN, and SIZE are the arguments to the call.
1824 IGNORE is true if the return value can be ignored. FCODE is the
1825 BUILT_IN_* code of the builtin. If MAXLEN is not NULL, it is the
1826 maximum length passed as the third argument. */
1828 static bool
1829 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
1830 tree dest, tree src, tree len, tree size,
1831 enum built_in_function fcode)
1833 gimple stmt = gsi_stmt (*gsi);
1834 location_t loc = gimple_location (stmt);
1835 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1836 tree fn;
1838 /* If SRC and DEST are the same (and not volatile), return DEST
1839 (resp. DEST+LEN for __mempcpy_chk). */
1840 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1842 if (fcode != BUILT_IN_MEMPCPY_CHK)
1844 replace_call_with_value (gsi, dest);
1845 return true;
1847 else
1849 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
1850 temp = force_gimple_operand_gsi (gsi, temp,
1851 false, NULL_TREE, true,
1852 GSI_SAME_STMT);
1853 replace_call_with_value (gsi, temp);
1854 return true;
1858 if (! tree_fits_uhwi_p (size))
1859 return false;
1861 tree maxlen = get_maxval_strlen (len, 2);
1862 if (! integer_all_onesp (size))
1864 if (! tree_fits_uhwi_p (len))
1866 /* If LEN is not constant, try MAXLEN too.
1867 For MAXLEN only allow optimizing into the non-_chk function
1868 if SIZE is >= MAXLEN; never convert to __chk_fail (). */
1869 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1871 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1873 /* (void) __mempcpy_chk () can be optimized into
1874 (void) __memcpy_chk (). */
1875 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1876 if (!fn)
1877 return false;
1879 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1880 replace_call_with_call_and_fold (gsi, repl);
1881 return true;
1883 return false;
1886 else
1887 maxlen = len;
1889 if (tree_int_cst_lt (size, maxlen))
1890 return false;
1893 fn = NULL_TREE;
1894 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1895 mem{cpy,pcpy,move,set} is available. */
1896 switch (fcode)
1898 case BUILT_IN_MEMCPY_CHK:
1899 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1900 break;
1901 case BUILT_IN_MEMPCPY_CHK:
1902 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1903 break;
1904 case BUILT_IN_MEMMOVE_CHK:
1905 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1906 break;
1907 case BUILT_IN_MEMSET_CHK:
1908 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1909 break;
1910 default:
1911 break;
1914 if (!fn)
1915 return false;
1917 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1918 replace_call_with_call_and_fold (gsi, repl);
1919 return true;
1922 /* Fold a call to the __st[rp]cpy_chk builtin.
1923 DEST, SRC, and SIZE are the arguments to the call.
1924 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
1925 code of the builtin. If MAXLEN is not NULL, it is the maximum length of
1926 the strings passed as the second argument. */
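/* An illustrative sketch of the folding below:

     char buf[16];
     __builtin___strcpy_chk (buf, "abc", __builtin_object_size (buf, 0));

   folds to strcpy (buf, "abc") since strlen ("abc") < 16; when the
   source length is known only as a non-constant expression, the call
   is turned into __memcpy_chk with length strlen + 1 instead.  */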
1928 static bool
1929 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1930 tree dest,
1931 tree src, tree size,
1932 enum built_in_function fcode)
1934 gimple stmt = gsi_stmt (*gsi);
1935 location_t loc = gimple_location (stmt);
1936 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1937 tree len, fn;
1939 /* If SRC and DEST are the same (and not volatile), return DEST. */
1940 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1942 replace_call_with_value (gsi, dest);
1943 return true;
1946 if (! tree_fits_uhwi_p (size))
1947 return false;
1949 tree maxlen = get_maxval_strlen (src, 1);
1950 if (! integer_all_onesp (size))
1952 len = c_strlen (src, 1);
1953 if (! len || ! tree_fits_uhwi_p (len))
1955 /* If LEN is not constant, try MAXLEN too.
1956 For MAXLEN only allow optimizing into non-_ocs function
1957 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1958 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1960 if (fcode == BUILT_IN_STPCPY_CHK)
1962 if (! ignore)
1963 return false;
1965 /* If the return value of __stpcpy_chk is ignored,
1966 optimize into __strcpy_chk. */
1967 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1968 if (!fn)
1969 return false;
1971 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1972 replace_call_with_call_and_fold (gsi, repl);
1973 return true;
1976 if (! len || TREE_SIDE_EFFECTS (len))
1977 return false;
1979 /* If c_strlen returned something, but not a constant,
1980 transform __strcpy_chk into __memcpy_chk. */
1981 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1982 if (!fn)
1983 return false;
1985 len = fold_convert_loc (loc, size_type_node, len);
1986 len = size_binop_loc (loc, PLUS_EXPR, len,
1987 build_int_cst (size_type_node, 1));
1988 len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
1989 true, GSI_SAME_STMT);
1990 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1991 replace_call_with_call_and_fold (gsi, repl);
1992 return true;
1995 else
1996 maxlen = len;
1998 if (! tree_int_cst_lt (maxlen, size))
1999 return false;
2002 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2003 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2004 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2005 if (!fn)
2006 return false;
2008 gimple repl = gimple_build_call (fn, 2, dest, src);
2009 replace_call_with_call_and_fold (gsi, repl);
2010 return true;
2013 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2014 are the arguments to the call. If MAXLEN is not NULL, it is the maximum
2015 length passed as the third argument. IGNORE is true if the return value
2016 can be ignored. FCODE is the BUILT_IN_* code of the builtin. */
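/* Sketch (illustrative):

     char buf[8];
     __builtin___strncpy_chk (buf, src, 4, __builtin_object_size (buf, 0));

   folds to strncpy (buf, src, 4) because LEN (4) <= SIZE (8); an
   unused __stpncpy_chk result is first demoted to __strncpy_chk.  */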
2018 static bool
2019 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2020 tree dest, tree src,
2021 tree len, tree size,
2022 enum built_in_function fcode)
2024 gimple stmt = gsi_stmt (*gsi);
2025 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2026 tree fn;
2028 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2030 /* If the return value of __stpncpy_chk is ignored,
2031 optimize into __strncpy_chk. */
2032 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2033 if (fn)
2035 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
2036 replace_call_with_call_and_fold (gsi, repl);
2037 return true;
2041 if (! tree_fits_uhwi_p (size))
2042 return false;
2044 tree maxlen = get_maxval_strlen (len, 2);
2045 if (! integer_all_onesp (size))
2047 if (! tree_fits_uhwi_p (len))
2049 /* If LEN is not constant, try MAXLEN too.
2050 For MAXLEN only allow optimizing into non-_ocs function
2051 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2052 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2053 return false;
2055 else
2056 maxlen = len;
2058 if (tree_int_cst_lt (size, maxlen))
2059 return false;
2062 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2063 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2064 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2065 if (!fn)
2066 return false;
2068 gimple repl = gimple_build_call (fn, 3, dest, src, len);
2069 replace_call_with_call_and_fold (gsi, repl);
2070 return true;
2073 /* Fold a function call to the builtin stpcpy with arguments DEST and SRC.
2074 Return false if no simplification can be made. */
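/* As an illustrative sketch, with a source of known length,

     p = stpcpy (dst, "abcd");

   becomes

     memcpy (dst, "abcd", 5);   (length 4 plus the trailing NUL)
     p = dst + 4;

   and when the result P is unused, plain strcpy is used instead.  */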
2076 static bool
2077 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2079 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2080 location_t loc = gimple_location (stmt);
2081 tree dest = gimple_call_arg (stmt, 0);
2082 tree src = gimple_call_arg (stmt, 1);
2083 tree fn, len, lenp1;
2085 /* If the result is unused, replace stpcpy with strcpy. */
2086 if (gimple_call_lhs (stmt) == NULL_TREE)
2088 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2089 if (!fn)
2090 return false;
2091 gimple_call_set_fndecl (stmt, fn);
2092 fold_stmt (gsi);
2093 return true;
2096 len = c_strlen (src, 1);
2097 if (!len
2098 || TREE_CODE (len) != INTEGER_CST)
2099 return false;
2101 if (optimize_function_for_size_p (cfun)
2102 /* If length is zero it's small enough. */
2103 && !integer_zerop (len))
2104 return false;
2106 /* If the source has a known length, replace stpcpy with memcpy. */
2107 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2108 if (!fn)
2109 return false;
2111 gimple_seq stmts = NULL;
2112 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2113 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2114 tem, build_int_cst (size_type_node, 1));
2115 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2116 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2117 gimple_set_vuse (repl, gimple_vuse (stmt));
2118 gimple_set_vdef (repl, gimple_vdef (stmt));
2119 if (gimple_vdef (repl)
2120 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2121 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2122 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2123 /* Replace the result with dest + len. */
2124 stmts = NULL;
2125 tem = gimple_convert (&stmts, loc, sizetype, len);
2126 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2127 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2128 POINTER_PLUS_EXPR, dest, tem);
2129 gsi_replace (gsi, ret, true);
2130 /* Finally fold the memcpy call. */
2131 gimple_stmt_iterator gsi2 = *gsi;
2132 gsi_prev (&gsi2);
2133 fold_stmt (&gsi2);
2134 return true;
2137 /* Fold a call to __{,v}snprintf_chk at *GSI. Return false if a normal
2138 call should be emitted rather than expanding the function inline;
2139 otherwise fold it and return true. FCODE is either BUILT_IN_SNPRINTF_CHK
2140 or BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum
2141 length passed as the second argument. */
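/* For example (illustrative):

     __builtin___snprintf_chk (buf, 8, 0, __builtin_object_size (buf, 0),
                               "%d", i);

   becomes snprintf (buf, 8, "%d", i) when the known object size is at
   least the length argument, or when the size is unknown, i.e.
   (size_t) -1.  */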
2143 static bool
2144 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2145 enum built_in_function fcode)
2147 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2148 tree dest, size, len, fn, fmt, flag;
2149 const char *fmt_str;
2151 /* Verify the required arguments in the original call. */
2152 if (gimple_call_num_args (stmt) < 5)
2153 return false;
2155 dest = gimple_call_arg (stmt, 0);
2156 len = gimple_call_arg (stmt, 1);
2157 flag = gimple_call_arg (stmt, 2);
2158 size = gimple_call_arg (stmt, 3);
2159 fmt = gimple_call_arg (stmt, 4);
2161 if (! tree_fits_uhwi_p (size))
2162 return false;
2164 if (! integer_all_onesp (size))
2166 tree maxlen = get_maxval_strlen (len, 2);
2167 if (! tree_fits_uhwi_p (len))
2169 /* If LEN is not constant, try MAXLEN too.
2170 For MAXLEN only allow optimizing into non-_ocs function
2171 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2172 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2173 return false;
2175 else
2176 maxlen = len;
2178 if (tree_int_cst_lt (size, maxlen))
2179 return false;
2182 if (!init_target_chars ())
2183 return false;
2185 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2186 or if format doesn't contain % chars or is "%s". */
2187 if (! integer_zerop (flag))
2189 fmt_str = c_getstr (fmt);
2190 if (fmt_str == NULL)
2191 return false;
2192 if (strchr (fmt_str, target_percent) != NULL
2193 && strcmp (fmt_str, target_percent_s))
2194 return false;
2197 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2198 available. */
2199 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2200 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2201 if (!fn)
2202 return false;
2204 /* Replace the called function and the first 5 arguments by 3,
2205 retaining the trailing varargs. */
2206 gimple_call_set_fndecl (stmt, fn);
2207 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2208 gimple_call_set_arg (stmt, 0, dest);
2209 gimple_call_set_arg (stmt, 1, len);
2210 gimple_call_set_arg (stmt, 2, fmt);
2211 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2212 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2213 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2214 fold_stmt (gsi);
2215 return true;
2218 /* Fold a call to __{,v}sprintf_chk at *GSI. Return false if a normal
2219 call should be emitted rather than expanding the function inline;
2220 otherwise fold it and return true. FCODE is either BUILT_IN_SPRINTF_CHK
2221 or BUILT_IN_VSPRINTF_CHK. */
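/* For example (illustrative): when the result cannot exceed the object
   size,

     __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 0),
                              "hello");

   becomes sprintf (buf, "hello") because the known output length (5)
   is smaller than SIZE.  */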
2223 static bool
2224 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2225 enum built_in_function fcode)
2227 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2228 tree dest, size, len, fn, fmt, flag;
2229 const char *fmt_str;
2230 unsigned nargs = gimple_call_num_args (stmt);
2232 /* Verify the required arguments in the original call. */
2233 if (nargs < 4)
2234 return false;
2235 dest = gimple_call_arg (stmt, 0);
2236 flag = gimple_call_arg (stmt, 1);
2237 size = gimple_call_arg (stmt, 2);
2238 fmt = gimple_call_arg (stmt, 3);
2240 if (! tree_fits_uhwi_p (size))
2241 return false;
2243 len = NULL_TREE;
2245 if (!init_target_chars ())
2246 return false;
2248 /* Check whether the format is a literal string constant. */
2249 fmt_str = c_getstr (fmt);
2250 if (fmt_str != NULL)
2252 /* If the format doesn't contain % args or %%, we know the size. */
2253 if (strchr (fmt_str, target_percent) == 0)
2255 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2256 len = build_int_cstu (size_type_node, strlen (fmt_str));
2258 /* If the format is "%s" and first ... argument is a string literal,
2259 we know the size too. */
2260 else if (fcode == BUILT_IN_SPRINTF_CHK
2261 && strcmp (fmt_str, target_percent_s) == 0)
2263 tree arg;
2265 if (nargs == 5)
2267 arg = gimple_call_arg (stmt, 4);
2268 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2270 len = c_strlen (arg, 1);
2271 if (! len || ! tree_fits_uhwi_p (len))
2272 len = NULL_TREE;
2278 if (! integer_all_onesp (size))
2280 if (! len || ! tree_int_cst_lt (len, size))
2281 return false;
2284 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2285 or if format doesn't contain % chars or is "%s". */
2286 if (! integer_zerop (flag))
2288 if (fmt_str == NULL)
2289 return false;
2290 if (strchr (fmt_str, target_percent) != NULL
2291 && strcmp (fmt_str, target_percent_s))
2292 return false;
2295 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2296 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2297 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2298 if (!fn)
2299 return false;
2301 /* Replace the called function and the first 4 arguments by 2,
2302 retaining the trailing varargs. */
2303 gimple_call_set_fndecl (stmt, fn);
2304 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2305 gimple_call_set_arg (stmt, 0, dest);
2306 gimple_call_set_arg (stmt, 1, fmt);
2307 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2308 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2309 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2310 fold_stmt (gsi);
2311 return true;
2314 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2315 ORIG may be null if this is a 2-argument call. We don't attempt to
2316 simplify calls with more than 3 arguments.
2318 Return false if no simplification was possible, otherwise replace the
2319 call with the simplified form and return true. Some transformations
2320 apply only when the call's return value is unused (it has no LHS). */
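/* The two shapes handled below, as an illustrative sketch:

     sprintf (str, "hello");    -> strcpy (str, "hello"), result 5
     sprintf (str, "%s", src);  -> strcpy (str, src), where a used
                                   return value additionally requires
                                   a known strlen (src).  */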
2322 static bool
2323 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2325 gimple stmt = gsi_stmt (*gsi);
2326 tree dest = gimple_call_arg (stmt, 0);
2327 tree fmt = gimple_call_arg (stmt, 1);
2328 tree orig = NULL_TREE;
2329 const char *fmt_str = NULL;
2331 /* Verify the required arguments in the original call. We deal with two
2332 types of sprintf() calls: 'sprintf (str, fmt)' and
2333 'sprintf (dest, "%s", orig)'. */
2334 if (gimple_call_num_args (stmt) > 3)
2335 return false;
2337 if (gimple_call_num_args (stmt) == 3)
2338 orig = gimple_call_arg (stmt, 2);
2340 /* Check whether the format is a literal string constant. */
2341 fmt_str = c_getstr (fmt);
2342 if (fmt_str == NULL)
2343 return false;
2345 if (!init_target_chars ())
2346 return false;
2348 /* If the format doesn't contain % args or %%, use strcpy. */
2349 if (strchr (fmt_str, target_percent) == NULL)
2351 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2353 if (!fn)
2354 return false;
2356 /* Don't optimize sprintf (buf, "abc", ptr++). */
2357 if (orig)
2358 return false;
2360 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2361 'format' is known to contain no % formats. */
2362 gimple_seq stmts = NULL;
2363 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2364 gimple_seq_add_stmt_without_update (&stmts, repl);
2365 if (gimple_call_lhs (stmt))
2367 repl = gimple_build_assign (gimple_call_lhs (stmt),
2368 build_int_cst (integer_type_node,
2369 strlen (fmt_str)));
2370 gimple_seq_add_stmt_without_update (&stmts, repl);
2371 gsi_replace_with_seq_vops (gsi, stmts);
2372 /* gsi now points at the assignment to the lhs; get a
2373 stmt iterator to the strcpy call.
2374 ??? We can't use gsi_for_stmt as that doesn't work when the
2375 CFG isn't built yet. */
2376 gimple_stmt_iterator gsi2 = *gsi;
2377 gsi_prev (&gsi2);
2378 fold_stmt (&gsi2);
2380 else
2382 gsi_replace_with_seq_vops (gsi, stmts);
2383 fold_stmt (gsi);
2385 return true;
2388 /* If the format is "%s", use strcpy if the result isn't used. */
2389 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2391 tree fn;
2392 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2394 if (!fn)
2395 return false;
2397 /* Don't crash on sprintf (str1, "%s"). */
2398 if (!orig)
2399 return false;
2401 tree orig_len = NULL_TREE;
2402 if (gimple_call_lhs (stmt))
2404 orig_len = get_maxval_strlen (orig, 0);
2405 if (!orig_len)
2406 return false;
2409 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2410 gimple_seq stmts = NULL;
2411 gimple repl = gimple_build_call (fn, 2, dest, orig);
2412 gimple_seq_add_stmt_without_update (&stmts, repl);
2413 if (gimple_call_lhs (stmt))
2415 if (!useless_type_conversion_p (integer_type_node,
2416 TREE_TYPE (orig_len)))
2417 orig_len = fold_convert (integer_type_node, orig_len);
2418 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2419 gimple_seq_add_stmt_without_update (&stmts, repl);
2420 gsi_replace_with_seq_vops (gsi, stmts);
2421 /* gsi now points at the assignment to the lhs; get a
2422 stmt iterator to the strcpy call.
2423 ??? We can't use gsi_for_stmt as that doesn't work when the
2424 CFG isn't built yet. */
2425 gimple_stmt_iterator gsi2 = *gsi;
2426 gsi_prev (&gsi2);
2427 fold_stmt (&gsi2);
2429 else
2431 gsi_replace_with_seq_vops (gsi, stmts);
2432 fold_stmt (gsi);
2434 return true;
2436 return false;
2439 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2440 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2441 attempt to simplify calls with more than 4 arguments.
2443 Return false if no simplification was possible, otherwise replace the
2444 call with the simplified form and return true. Some transformations
2445 apply only when the call's return value is unused (it has no LHS). */
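/* Illustrative sketch, assuming a constant destination size:

     snprintf (buf, 8, "hi");       -> strcpy (buf, "hi"), result 2
     snprintf (buf, 8, "%s", src);  -> strcpy (buf, src), provided the
                                       maximal strlen (src) is below 8

   No folding is done when the output might be truncated.  */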
2447 static bool
2448 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2450 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2451 tree dest = gimple_call_arg (stmt, 0);
2452 tree destsize = gimple_call_arg (stmt, 1);
2453 tree fmt = gimple_call_arg (stmt, 2);
2454 tree orig = NULL_TREE;
2455 const char *fmt_str = NULL;
2457 if (gimple_call_num_args (stmt) > 4)
2458 return false;
2460 if (gimple_call_num_args (stmt) == 4)
2461 orig = gimple_call_arg (stmt, 3);
2463 if (!tree_fits_uhwi_p (destsize))
2464 return false;
2465 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2467 /* Check whether the format is a literal string constant. */
2468 fmt_str = c_getstr (fmt);
2469 if (fmt_str == NULL)
2470 return false;
2472 if (!init_target_chars ())
2473 return false;
2475 /* If the format doesn't contain % args or %%, use strcpy. */
2476 if (strchr (fmt_str, target_percent) == NULL)
2478 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2479 if (!fn)
2480 return false;
2482 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2483 if (orig)
2484 return false;
2486 /* We could expand this as
2487 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2488 or to
2489 memcpy (str, fmt_with_nul_at_cstm1, cst);
2490 but in the former case that might increase code size
2491 and in the latter case grow the .rodata section too much.
2492 So punt for now. */
2493 size_t len = strlen (fmt_str);
2494 if (len >= destlen)
2495 return false;
2497 gimple_seq stmts = NULL;
2498 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2499 gimple_seq_add_stmt_without_update (&stmts, repl);
2500 if (gimple_call_lhs (stmt))
2502 repl = gimple_build_assign (gimple_call_lhs (stmt),
2503 build_int_cst (integer_type_node, len));
2504 gimple_seq_add_stmt_without_update (&stmts, repl);
2505 gsi_replace_with_seq_vops (gsi, stmts);
2506 /* gsi now points at the assignment to the lhs; get a
2507 stmt iterator to the strcpy call.
2508 ??? We can't use gsi_for_stmt as that doesn't work when the
2509 CFG isn't built yet. */
2510 gimple_stmt_iterator gsi2 = *gsi;
2511 gsi_prev (&gsi2);
2512 fold_stmt (&gsi2);
2514 else
2516 gsi_replace_with_seq_vops (gsi, stmts);
2517 fold_stmt (gsi);
2519 return true;
2522 /* If the format is "%s", use strcpy if the result isn't used. */
2523 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2525 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2526 if (!fn)
2527 return false;
2529 /* Don't crash on snprintf (str1, cst, "%s"). */
2530 if (!orig)
2531 return false;
2533 tree orig_len = get_maxval_strlen (orig, 0);
2534 if (!orig_len)
2535 return false;
2537 /* We could expand this as
2538 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2539 or to
2540 memcpy (str1, str2_with_nul_at_cstm1, cst);
2541 but in the former case that might increase code size
2542 and in the latter case grow the .rodata section too much.
2543 So punt for now. */
2544 if (compare_tree_int (orig_len, destlen) >= 0)
2545 return false;
2547 /* Convert snprintf (str1, cst, "%s", str2) into
2548 strcpy (str1, str2) if strlen (str2) < cst. */
2549 gimple_seq stmts = NULL;
2550 gimple repl = gimple_build_call (fn, 2, dest, orig);
2551 gimple_seq_add_stmt_without_update (&stmts, repl);
2552 if (gimple_call_lhs (stmt))
2554 if (!useless_type_conversion_p (integer_type_node,
2555 TREE_TYPE (orig_len)))
2556 orig_len = fold_convert (integer_type_node, orig_len);
2557 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2558 gimple_seq_add_stmt_without_update (&stmts, repl);
2559 gsi_replace_with_seq_vops (gsi, stmts);
2560 /* gsi now points at the assignment to the lhs; get a
2561 stmt iterator to the strcpy call.
2562 ??? We can't use gsi_for_stmt as that doesn't work when the
2563 CFG isn't built yet. */
2564 gimple_stmt_iterator gsi2 = *gsi;
2565 gsi_prev (&gsi2);
2566 fold_stmt (&gsi2);
2568 else
2570 gsi_replace_with_seq_vops (gsi, stmts);
2571 fold_stmt (gsi);
2573 return true;
2575 return false;
2578 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2579 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2580 more than 3 arguments, and ARG may be null in the 2-argument case.
2582 Return false if no simplification was possible, otherwise replace the
2583 call and return true. FCODE is the BUILT_IN_*
2584 code of the function to be simplified. */
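/* Sketch of the cases handled below (illustrative):

     fprintf (fp, "hello");    -> fputs ("hello", fp)
     fprintf (fp, "");         -> call removed
     fprintf (fp, "%s", str);  -> fputs (str, fp)
     fprintf (fp, "%c", c);    -> fputc (c, fp)

   each only when the return value of fprintf is unused.  */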
2586 static bool
2587 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2588 tree fp, tree fmt, tree arg,
2589 enum built_in_function fcode)
2591 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2592 tree fn_fputc, fn_fputs;
2593 const char *fmt_str = NULL;
2595 /* If the return value is used, don't do the transformation. */
2596 if (gimple_call_lhs (stmt) != NULL_TREE)
2597 return false;
2599 /* Check whether the format is a literal string constant. */
2600 fmt_str = c_getstr (fmt);
2601 if (fmt_str == NULL)
2602 return false;
2604 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2606 /* If we're using an unlocked function, assume the other
2607 unlocked functions exist explicitly. */
2608 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2609 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2611 else
2613 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2614 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2617 if (!init_target_chars ())
2618 return false;
2620 /* If the format doesn't contain % args or %%, use fputs. */
2621 if (strchr (fmt_str, target_percent) == NULL)
2623 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2624 && arg)
2625 return false;
2627 /* If the format specifier was "", fprintf does nothing. */
2628 if (fmt_str[0] == '\0')
2630 replace_call_with_value (gsi, NULL_TREE);
2631 return true;
2634 /* When "string" doesn't contain %, replace all cases of
2635 fprintf (fp, string) with fputs (string, fp). The fputs
2636 builtin will take care of special cases like length == 1. */
2637 if (fn_fputs)
2639 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2640 replace_call_with_call_and_fold (gsi, repl);
2641 return true;
2645 /* The other optimizations can be done only on the non-va_list variants. */
2646 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2647 return false;
2649 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2650 else if (strcmp (fmt_str, target_percent_s) == 0)
2652 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2653 return false;
2654 if (fn_fputs)
2656 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2657 replace_call_with_call_and_fold (gsi, repl);
2658 return true;
2662 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2663 else if (strcmp (fmt_str, target_percent_c) == 0)
2665 if (!arg
2666 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2667 return false;
2668 if (fn_fputc)
2670 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2671 replace_call_with_call_and_fold (gsi, repl);
2672 return true;
2676 return false;
2679 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2680 FMT and ARG are the arguments to the call; we don't fold cases with
2681 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2683 Return false if no simplification was possible, otherwise replace the
2684 call and return true. FCODE is the BUILT_IN_*
2685 code of the function to be simplified. */
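/* Sketch of the cases handled below (illustrative):

     printf ("a");          -> putchar ('a')
     printf ("hi\n");       -> puts ("hi")
     printf ("%s\n", str);  -> puts (str)
     printf ("%c", c);      -> putchar (c)

   again only when the return value is unused; a multi-character
   string without a trailing newline is kept as printf because stdout
   is not available here for an fputs call.  */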
2687 static bool
2688 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2689 tree arg, enum built_in_function fcode)
2691 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2692 tree fn_putchar, fn_puts, newarg;
2693 const char *fmt_str = NULL;
2695 /* If the return value is used, don't do the transformation. */
2696 if (gimple_call_lhs (stmt) != NULL_TREE)
2697 return false;
2699 /* Check whether the format is a literal string constant. */
2700 fmt_str = c_getstr (fmt);
2701 if (fmt_str == NULL)
2702 return false;
2704 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2706 /* If we're using an unlocked function, assume the other
2707 unlocked functions exist explicitly. */
2708 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2709 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2711 else
2713 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2714 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2717 if (!init_target_chars ())
2718 return false;
2720 if (strcmp (fmt_str, target_percent_s) == 0
2721 || strchr (fmt_str, target_percent) == NULL)
2723 const char *str;
2725 if (strcmp (fmt_str, target_percent_s) == 0)
2727 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2728 return false;
2730 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2731 return false;
2733 str = c_getstr (arg);
2734 if (str == NULL)
2735 return false;
2737 else
2739 /* The format specifier doesn't contain any '%' characters. */
2740 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2741 && arg)
2742 return false;
2743 str = fmt_str;
2746 /* If the string was "", printf does nothing. */
2747 if (str[0] == '\0')
2749 replace_call_with_value (gsi, NULL_TREE);
2750 return true;
2753 /* If the string has length of 1, call putchar. */
2754 if (str[1] == '\0')
2756 /* Given printf ("c") (where c is any one character),
2757 convert "c"[0] to an int and pass that to the replacement
2758 function. */
2759 newarg = build_int_cst (integer_type_node, str[0]);
2760 if (fn_putchar)
2762 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2763 replace_call_with_call_and_fold (gsi, repl);
2764 return true;
2767 else
2769 /* If the string was "string\n", call puts("string"). */
2770 size_t len = strlen (str);
2771 if ((unsigned char)str[len - 1] == target_newline
2772 && (size_t) (int) len == len
2773 && (int) len > 0)
2775 char *newstr;
2776 tree offset_node, string_cst;
2778 /* Create a NUL-terminated string that's one char shorter
2779 than the original, stripping off the trailing '\n'. */
2780 newarg = build_string_literal (len, str);
2781 string_cst = string_constant (newarg, &offset_node);
2782 gcc_checking_assert (string_cst
2783 && (TREE_STRING_LENGTH (string_cst)
2784 == (int) len)
2785 && integer_zerop (offset_node)
2786 && (unsigned char)
2787 TREE_STRING_POINTER (string_cst)[len - 1]
2788 == target_newline);
2789 /* build_string_literal creates a new STRING_CST,
2790 modify it in place to avoid double copying. */
2791 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2792 newstr[len - 1] = '\0';
2793 if (fn_puts)
2795 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2796 replace_call_with_call_and_fold (gsi, repl);
2797 return true;
2800 else
2801 /* We'd like to arrange to call fputs(string,stdout) here,
2802 but we need stdout and don't have a way to get it yet. */
2803 return false;
2807 /* The other optimizations can be done only on the non-va_list variants. */
2808 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2809 return false;
2811 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2812 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2814 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2815 return false;
2816 if (fn_puts)
2818 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2819 replace_call_with_call_and_fold (gsi, repl);
2820 return true;
2824 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2825 else if (strcmp (fmt_str, target_percent_c) == 0)
2827 if (!arg || ! useless_type_conversion_p (integer_type_node,
2828 TREE_TYPE (arg)))
2829 return false;
2830 if (fn_putchar)
2832 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2833 replace_call_with_call_and_fold (gsi, repl);
2834 return true;
2838 return false;
2843 /* Fold a call to __builtin_strlen when the length of the argument is known. */
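/* E.g. (illustrative) n = strlen ("abc") folds to n = 3, and likewise
   whenever get_maxval_strlen can compute the length of the argument.  */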
2845 static bool
2846 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2848 gimple stmt = gsi_stmt (*gsi);
2849 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2850 if (!len)
2851 return false;
2852 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2853 replace_call_with_value (gsi, len);
2854 return true;
2858 /* Fold the non-target builtin at *GSI and return whether any simplification
2859 was made. */
2861 static bool
2862 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2864 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2865 tree callee = gimple_call_fndecl (stmt);
2867 /* Give up for always_inline inline builtins until they are
2868 inlined. */
2869 if (avoid_folding_inline_builtin (callee))
2870 return false;
2872 unsigned n = gimple_call_num_args (stmt);
2873 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2874 switch (fcode)
2876 case BUILT_IN_BZERO:
2877 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2878 gimple_call_arg (stmt, 1));
2879 case BUILT_IN_MEMSET:
2880 return gimple_fold_builtin_memset (gsi,
2881 gimple_call_arg (stmt, 1),
2882 gimple_call_arg (stmt, 2));
2883 case BUILT_IN_BCOPY:
2884 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2885 gimple_call_arg (stmt, 0), 3);
2886 case BUILT_IN_MEMCPY:
2887 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2888 gimple_call_arg (stmt, 1), 0);
2889 case BUILT_IN_MEMPCPY:
2890 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2891 gimple_call_arg (stmt, 1), 1);
2892 case BUILT_IN_MEMMOVE:
2893 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2894 gimple_call_arg (stmt, 1), 3);
2895 case BUILT_IN_SPRINTF_CHK:
2896 case BUILT_IN_VSPRINTF_CHK:
2897 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2898 case BUILT_IN_STRCAT_CHK:
2899 return gimple_fold_builtin_strcat_chk (gsi);
2900 case BUILT_IN_STRNCAT_CHK:
2901 return gimple_fold_builtin_strncat_chk (gsi);
2902 case BUILT_IN_STRLEN:
2903 return gimple_fold_builtin_strlen (gsi);
2904 case BUILT_IN_STRCPY:
2905 return gimple_fold_builtin_strcpy (gsi,
2906 gimple_call_arg (stmt, 0),
2907 gimple_call_arg (stmt, 1));
2908 case BUILT_IN_STRNCPY:
2909 return gimple_fold_builtin_strncpy (gsi,
2910 gimple_call_arg (stmt, 0),
2911 gimple_call_arg (stmt, 1),
2912 gimple_call_arg (stmt, 2));
2913 case BUILT_IN_STRCAT:
2914 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2915 gimple_call_arg (stmt, 1));
2916 case BUILT_IN_STRNCAT:
2917 return gimple_fold_builtin_strncat (gsi);
2918 case BUILT_IN_FPUTS:
2919 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2920 gimple_call_arg (stmt, 1), false);
2921 case BUILT_IN_FPUTS_UNLOCKED:
2922 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2923 gimple_call_arg (stmt, 1), true);
2924 case BUILT_IN_MEMCPY_CHK:
2925 case BUILT_IN_MEMPCPY_CHK:
2926 case BUILT_IN_MEMMOVE_CHK:
2927 case BUILT_IN_MEMSET_CHK:
2928 return gimple_fold_builtin_memory_chk (gsi,
2929 gimple_call_arg (stmt, 0),
2930 gimple_call_arg (stmt, 1),
2931 gimple_call_arg (stmt, 2),
2932 gimple_call_arg (stmt, 3),
2933 fcode);
2934 case BUILT_IN_STPCPY:
2935 return gimple_fold_builtin_stpcpy (gsi);
2936 case BUILT_IN_STRCPY_CHK:
2937 case BUILT_IN_STPCPY_CHK:
2938 return gimple_fold_builtin_stxcpy_chk (gsi,
2939 gimple_call_arg (stmt, 0),
2940 gimple_call_arg (stmt, 1),
2941 gimple_call_arg (stmt, 2),
2942 fcode);
2943 case BUILT_IN_STRNCPY_CHK:
2944 case BUILT_IN_STPNCPY_CHK:
2945 return gimple_fold_builtin_stxncpy_chk (gsi,
2946 gimple_call_arg (stmt, 0),
2947 gimple_call_arg (stmt, 1),
2948 gimple_call_arg (stmt, 2),
2949 gimple_call_arg (stmt, 3),
2950 fcode);
2951 case BUILT_IN_SNPRINTF_CHK:
2952 case BUILT_IN_VSNPRINTF_CHK:
2953 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
2954 case BUILT_IN_SNPRINTF:
2955 return gimple_fold_builtin_snprintf (gsi);
2956 case BUILT_IN_SPRINTF:
2957 return gimple_fold_builtin_sprintf (gsi);
2958 case BUILT_IN_FPRINTF:
2959 case BUILT_IN_FPRINTF_UNLOCKED:
2960 case BUILT_IN_VFPRINTF:
2961 if (n == 2 || n == 3)
2962 return gimple_fold_builtin_fprintf (gsi,
2963 gimple_call_arg (stmt, 0),
2964 gimple_call_arg (stmt, 1),
2965 n == 3
2966 ? gimple_call_arg (stmt, 2)
2967 : NULL_TREE,
2968 fcode);
2969 break;
2970 case BUILT_IN_FPRINTF_CHK:
2971 case BUILT_IN_VFPRINTF_CHK:
2972 if (n == 3 || n == 4)
2973 return gimple_fold_builtin_fprintf (gsi,
2974 gimple_call_arg (stmt, 0),
2975 gimple_call_arg (stmt, 2),
2976 n == 4
2977 ? gimple_call_arg (stmt, 3)
2978 : NULL_TREE,
2979 fcode);
2980 break;
2981 case BUILT_IN_PRINTF:
2982 case BUILT_IN_PRINTF_UNLOCKED:
2983 case BUILT_IN_VPRINTF:
2984 if (n == 1 || n == 2)
2985 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2986 n == 2
2987 ? gimple_call_arg (stmt, 1)
2988 : NULL_TREE, fcode);
2989 break;
2990 case BUILT_IN_PRINTF_CHK:
2991 case BUILT_IN_VPRINTF_CHK:
2992 if (n == 2 || n == 3)
2993 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2994 n == 3
2995 ? gimple_call_arg (stmt, 2)
2996 : NULL_TREE, fcode);
2997 default:;
3000 /* Try the generic builtin folder. */
3001 bool ignore = (gimple_call_lhs (stmt) == NULL);
3002 tree result = fold_call_stmt (stmt, ignore);
3003 if (result)
3005 if (ignore)
3006 STRIP_NOPS (result);
3007 else
3008 result = fold_convert (gimple_call_return_type (stmt), result);
3009 if (!update_call_from_tree (gsi, result))
3010 gimplify_and_update_call_from_tree (gsi, result);
3011 return true;
3014 return false;
3017 /* Return true if the result of ARG0 CODE ARG1, computed in infinite
3018 signed precision, doesn't fit into TYPE. The overflow test is done
3019 regardless of -fwrapv, and also for unsigned types. */
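/* For example (illustrative), with a 32-bit unsigned TYPE and
   PLUS_EXPR, 0xffffffffU + 1 computes to the exact value 2^32, whose
   minimum precision is 33 > 32, so the function returns true; and
   0U - 1U yields a negative infinite-precision result, caught by the
   wi::neg_p check below.  */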
3021 bool
3022 arith_overflowed_p (enum tree_code code, const_tree type,
3023 const_tree arg0, const_tree arg1)
3025 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3026 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3027 widest2_int_cst;
3028 widest2_int warg0 = widest2_int_cst (arg0);
3029 widest2_int warg1 = widest2_int_cst (arg1);
3030 widest2_int wres;
3031 switch (code)
3033 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3034 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3035 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3036 default: gcc_unreachable ();
3038 signop sign = TYPE_SIGN (type);
3039 if (sign == UNSIGNED && wi::neg_p (wres))
3040 return true;
3041 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3044 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3045 The statement may be replaced by another statement, e.g., if the call
3046 simplifies to a constant value. Return true if any changes were made.
3047 It is assumed that the operands have been previously folded. */
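/* For instance (illustrative), a polymorphic call

     obj->foo ();

   whose only possible target is known becomes a direct call to that
   target, and a call proven to have no possible target is replaced
   by a call to __builtin_unreachable ().  */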
3049 static bool
3050 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
3052 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3053 tree callee;
3054 bool changed = false;
3055 unsigned i;
3057 /* Fold *& in call arguments. */
3058 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3059 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3061 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3062 if (tmp)
3064 gimple_call_set_arg (stmt, i, tmp);
3065 changed = true;
3069 /* Check for virtual calls that became direct calls. */
3070 callee = gimple_call_fn (stmt);
3071 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3073 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3075 if (dump_file && virtual_method_call_p (callee)
3076 && !possible_polymorphic_call_target_p
3077 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3078 (OBJ_TYPE_REF_EXPR (callee)))))
3080 fprintf (dump_file,
3081 "Type inheritance inconsistent devirtualization of ");
3082 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3083 fprintf (dump_file, " to ");
3084 print_generic_expr (dump_file, callee, TDF_SLIM);
3085 fprintf (dump_file, "\n");
3088 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3089 changed = true;
3091 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3093 bool final;
3094 vec <cgraph_node *>targets
3095 = possible_polymorphic_call_targets (callee, stmt, &final);
3096 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3098 tree lhs = gimple_call_lhs (stmt);
3099 if (dump_enabled_p ())
3101 location_t loc = gimple_location_safe (stmt);
3102 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3103 "folding virtual function call to %s\n",
3104 targets.length () == 1
3105 ? targets[0]->name ()
3106 : "__builtin_unreachable");
3108 if (targets.length () == 1)
3110 gimple_call_set_fndecl (stmt, targets[0]->decl);
3111 changed = true;
3112 /* If the call becomes noreturn, remove the lhs. */
3113 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
3115 if (TREE_CODE (lhs) == SSA_NAME)
3117 tree var = create_tmp_var (TREE_TYPE (lhs));
3118 tree def = get_or_create_ssa_default_def (cfun, var);
3119 gimple new_stmt = gimple_build_assign (lhs, def);
3120 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3122 gimple_call_set_lhs (stmt, NULL_TREE);
3124 maybe_remove_unused_call_args (cfun, stmt);
3126 else
3128 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3129 gimple new_stmt = gimple_build_call (fndecl, 0);
3130 gimple_set_location (new_stmt, gimple_location (stmt));
3131 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3133 tree var = create_tmp_var (TREE_TYPE (lhs));
3134 tree def = get_or_create_ssa_default_def (cfun, var);
3136 /* To satisfy the condition for
3137 cgraph_update_edges_for_call_stmt_node,
3138 we need to preserve the GIMPLE_CALL statement
3139 at the position of the GSI iterator. */
3140 update_call_from_tree (gsi, def);
3141 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3143 else
3145 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3146 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3147 gsi_replace (gsi, new_stmt, false);
3149 return true;
3155 /* Check for indirect calls that became direct calls, and then
3156 no longer require a static chain. */
3157 if (gimple_call_chain (stmt))
3159 tree fn = gimple_call_fndecl (stmt);
3160 if (fn && !DECL_STATIC_CHAIN (fn))
3162 gimple_call_set_chain (stmt, NULL);
3163 changed = true;
3165 else
3167 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3168 if (tmp)
3170 gimple_call_set_chain (stmt, tmp);
3171 changed = true;
3176 if (inplace)
3177 return changed;
3179 /* Check for builtins that CCP can handle using information not
3180 available in the generic fold routines. */
3181 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3183 if (gimple_fold_builtin (gsi))
3184 changed = true;
3186 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3188 changed |= targetm.gimple_fold_builtin (gsi);
3190 else if (gimple_call_internal_p (stmt))
3192 enum tree_code subcode = ERROR_MARK;
3193 tree result = NULL_TREE;
3194 bool cplx_result = false;
3195 tree overflow = NULL_TREE;
3196 switch (gimple_call_internal_fn (stmt))
3198 case IFN_BUILTIN_EXPECT:
3199 result = fold_builtin_expect (gimple_location (stmt),
3200 gimple_call_arg (stmt, 0),
3201 gimple_call_arg (stmt, 1),
3202 gimple_call_arg (stmt, 2));
3203 break;
3204 case IFN_UBSAN_OBJECT_SIZE:
3205 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3206 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3207 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3208 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3209 gimple_call_arg (stmt, 2))))
3211 gsi_replace (gsi, gimple_build_nop (), true);
3212 unlink_stmt_vdef (stmt);
3213 release_defs (stmt);
3214 return true;
3216 break;
3217 case IFN_UBSAN_CHECK_ADD:
3218 subcode = PLUS_EXPR;
3219 break;
3220 case IFN_UBSAN_CHECK_SUB:
3221 subcode = MINUS_EXPR;
3222 break;
3223 case IFN_UBSAN_CHECK_MUL:
3224 subcode = MULT_EXPR;
3225 break;
3226 case IFN_ADD_OVERFLOW:
3227 subcode = PLUS_EXPR;
3228 cplx_result = true;
3229 break;
3230 case IFN_SUB_OVERFLOW:
3231 subcode = MINUS_EXPR;
3232 cplx_result = true;
3233 break;
3234 case IFN_MUL_OVERFLOW:
3235 subcode = MULT_EXPR;
3236 cplx_result = true;
3237 break;
3238 default:
3239 break;
3241 if (subcode != ERROR_MARK)
3243 tree arg0 = gimple_call_arg (stmt, 0);
3244 tree arg1 = gimple_call_arg (stmt, 1);
3245 tree type = TREE_TYPE (arg0);
3246 if (cplx_result)
3248 tree lhs = gimple_call_lhs (stmt);
3249 if (lhs == NULL_TREE)
3250 type = NULL_TREE;
3251 else
3252 type = TREE_TYPE (TREE_TYPE (lhs));
3254 if (type == NULL_TREE)
3256 /* x = y + 0; x = y - 0; x = y * 0; */
3257 else if (integer_zerop (arg1))
3258 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3259 /* x = 0 + y; x = 0 * y; */
3260 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3261 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3262 /* x = y - y; */
3263 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3264 result = integer_zero_node;
3265 /* x = y * 1; x = 1 * y; */
3266 else if (subcode == MULT_EXPR && integer_onep (arg1))
3267 result = arg0;
3268 else if (subcode == MULT_EXPR && integer_onep (arg0))
3269 result = arg1;
3270 else if (TREE_CODE (arg0) == INTEGER_CST
3271 && TREE_CODE (arg1) == INTEGER_CST)
3273 if (cplx_result)
3274 result = int_const_binop (subcode, fold_convert (type, arg0),
3275 fold_convert (type, arg1));
3276 else
3277 result = int_const_binop (subcode, arg0, arg1);
3278 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3280 if (cplx_result)
3281 overflow = build_one_cst (type);
3282 else
3283 result = NULL_TREE;
3286 if (result)
3288 if (result == integer_zero_node)
3289 result = build_zero_cst (type);
3290 else if (cplx_result && TREE_TYPE (result) != type)
3292 if (TREE_CODE (result) == INTEGER_CST)
3294 if (arith_overflowed_p (PLUS_EXPR, type, result,
3295 integer_zero_node))
3296 overflow = build_one_cst (type);
3298 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3299 && TYPE_UNSIGNED (type))
3300 || (TYPE_PRECISION (type)
3301 < (TYPE_PRECISION (TREE_TYPE (result))
3302 + (TYPE_UNSIGNED (TREE_TYPE (result))
3303 && !TYPE_UNSIGNED (type)))))
3304 result = NULL_TREE;
3305 if (result)
3306 result = fold_convert (type, result);
3311 if (result)
3313 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3314 result = drop_tree_overflow (result);
3315 if (cplx_result)
3317 if (overflow == NULL_TREE)
3318 overflow = build_zero_cst (TREE_TYPE (result));
3319 tree ctype = build_complex_type (TREE_TYPE (result));
3320 if (TREE_CODE (result) == INTEGER_CST
3321 && TREE_CODE (overflow) == INTEGER_CST)
3322 result = build_complex (ctype, result, overflow);
3323 else
3324 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3325 ctype, result, overflow);
3327 if (!update_call_from_tree (gsi, result))
3328 gimplify_and_update_call_from_tree (gsi, result);
3329 changed = true;
3333 return changed;
3337 /* Worker for fold_stmt_1: dispatch to pattern-based folding with
3338 gimple_simplify.
3340 Replaces *GSI with the simplification result in RCODE and OPS
3341 and the associated statements in *SEQ. Does the replacement
3342 according to INPLACE and returns true if the operation succeeded. */
3344 static bool
3345 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3346 code_helper rcode, tree *ops,
3347 gimple_seq *seq, bool inplace)
3349 gimple stmt = gsi_stmt (*gsi);
3351 /* Play safe and do not allow abnormals to be mentioned in
3352 newly created statements. See also maybe_push_res_to_seq. */
3353 if ((TREE_CODE (ops[0]) == SSA_NAME
3354 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
3355 || (ops[1]
3356 && TREE_CODE (ops[1]) == SSA_NAME
3357 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
3358 || (ops[2]
3359 && TREE_CODE (ops[2]) == SSA_NAME
3360 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
3361 return false;
3363 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
3365 gcc_assert (rcode.is_tree_code ());
3366 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3367 /* GIMPLE_CONDs condition may not throw. */
3368 && (!flag_exceptions
3369 || !cfun->can_throw_non_call_exceptions
3370 || !operation_could_trap_p (rcode,
3371 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3372 false, NULL_TREE)))
3373 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
3374 else if (rcode == SSA_NAME)
3375 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
3376 build_zero_cst (TREE_TYPE (ops[0])));
3377 else if (rcode == INTEGER_CST)
3379 if (integer_zerop (ops[0]))
3380 gimple_cond_make_false (cond_stmt);
3381 else
3382 gimple_cond_make_true (cond_stmt);
3384 else if (!inplace)
3386 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3387 ops, seq);
3388 if (!res)
3389 return false;
3390 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
3391 build_zero_cst (TREE_TYPE (res)));
3393 else
3394 return false;
3395 if (dump_file && (dump_flags & TDF_DETAILS))
3397 fprintf (dump_file, "gimple_simplified to ");
3398 if (!gimple_seq_empty_p (*seq))
3399 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3400 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3401 0, TDF_SLIM);
3403 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3404 return true;
3406 else if (is_gimple_assign (stmt)
3407 && rcode.is_tree_code ())
3409 if (!inplace
3410 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
3412 maybe_build_generic_op (rcode,
3413 TREE_TYPE (gimple_assign_lhs (stmt)),
3414 &ops[0], ops[1], ops[2]);
3415 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
3416 if (dump_file && (dump_flags & TDF_DETAILS))
3418 fprintf (dump_file, "gimple_simplified to ");
3419 if (!gimple_seq_empty_p (*seq))
3420 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3421 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3422 0, TDF_SLIM);
3424 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3425 return true;
3428 else if (!inplace)
3430 if (gimple_has_lhs (stmt))
3432 tree lhs = gimple_get_lhs (stmt);
3433 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3434 ops, seq, lhs))
3435 return false;
3436 if (dump_file && (dump_flags & TDF_DETAILS))
3438 fprintf (dump_file, "gimple_simplified to ");
3439 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3441 gsi_replace_with_seq_vops (gsi, *seq);
3442 return true;
3444 else
3445 gcc_unreachable ();
3448 return false;
3451 /* Canonicalize a MEM_REF's invariant address operand after propagation. */
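/* Two canonicalizations are done here (illustrative examples):
   MEM[&foo.bar, 4] is rebased to MEM[&foo, offsetof (bar) + 4], and
   MEM[&decl, 0] becomes the plain reference DECL when the access has
   the same volatility, TBAA behavior, alignment and type as the decl
   itself; TARGET_MEM_REFs are refolded when their indexes became
   constant.  */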
3453 static bool
3454 maybe_canonicalize_mem_ref_addr (tree *t)
3456 bool res = false;
3458 if (TREE_CODE (*t) == ADDR_EXPR)
3459 t = &TREE_OPERAND (*t, 0);
3461 while (handled_component_p (*t))
3462 t = &TREE_OPERAND (*t, 0);
3464 /* Canonicalize MEM [&foo.bar, 0], which appears after propagation
3465 of invariant addresses into an SSA name MEM_REF address. */
3466 if (TREE_CODE (*t) == MEM_REF
3467 || TREE_CODE (*t) == TARGET_MEM_REF)
3469 tree addr = TREE_OPERAND (*t, 0);
3470 if (TREE_CODE (addr) == ADDR_EXPR
3471 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3472 || handled_component_p (TREE_OPERAND (addr, 0))))
3474 tree base;
3475 HOST_WIDE_INT coffset;
3476 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3477 &coffset);
3478 if (!base)
3479 gcc_unreachable ();
3481 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3482 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3483 TREE_OPERAND (*t, 1),
3484 size_int (coffset));
3485 res = true;
3487 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3488 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3491 /* Canonicalize back MEM_REFs to plain reference trees if the object
3492 accessed is a decl that has the same access semantics as the MEM_REF. */
3493 if (TREE_CODE (*t) == MEM_REF
3494 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
3495 && integer_zerop (TREE_OPERAND (*t, 1))
3496 && MR_DEPENDENCE_CLIQUE (*t) == 0)
3498 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3499 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3500 if (/* Same volatile qualification. */
3501 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3502 /* Same TBAA behavior with -fstrict-aliasing. */
3503 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3504 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3505 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3506 /* Same alignment. */
3507 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3508 /* We have to look out here to not drop a required conversion
3509 from the rhs to the lhs if *t appears on the lhs or vice-versa
3510 if it appears on the rhs. Thus require strict type
3511 compatibility. */
3512 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3514 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3515 res = true;
3519 /* Canonicalize TARGET_MEM_REF in particular with respect to
3520 the indexes becoming constant. */
3521 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3523 tree tem = maybe_fold_tmr (*t);
3524 if (tem)
3526 *t = tem;
3527 res = true;
3531 return res;
3534 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3535 distinguishes the two cases. */
3537 static bool
3538 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3540 bool changed = false;
3541 gimple stmt = gsi_stmt (*gsi);
3542 unsigned i;
3544 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3545 after propagation.
3546 ??? This shouldn't be done in generic folding but in the
3547 propagation helpers which also know whether an address was
3548 propagated. */
3549 switch (gimple_code (stmt))
3551 case GIMPLE_ASSIGN:
3552 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3554 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3555 if ((REFERENCE_CLASS_P (*rhs)
3556 || TREE_CODE (*rhs) == ADDR_EXPR)
3557 && maybe_canonicalize_mem_ref_addr (rhs))
3558 changed = true;
3559 tree *lhs = gimple_assign_lhs_ptr (stmt);
3560 if (REFERENCE_CLASS_P (*lhs)
3561 && maybe_canonicalize_mem_ref_addr (lhs))
3562 changed = true;
3564 break;
3565 case GIMPLE_CALL:
3567 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3569 tree *arg = gimple_call_arg_ptr (stmt, i);
3570 if (REFERENCE_CLASS_P (*arg)
3571 && maybe_canonicalize_mem_ref_addr (arg))
3572 changed = true;
3574 tree *lhs = gimple_call_lhs_ptr (stmt);
3575 if (*lhs
3576 && REFERENCE_CLASS_P (*lhs)
3577 && maybe_canonicalize_mem_ref_addr (lhs))
3578 changed = true;
3579 break;
3581 case GIMPLE_ASM:
3583 gasm *asm_stmt = as_a <gasm *> (stmt);
3584 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3586 tree link = gimple_asm_output_op (asm_stmt, i);
3587 tree op = TREE_VALUE (link);
3588 if (REFERENCE_CLASS_P (op)
3589 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3590 changed = true;
3592 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3594 tree link = gimple_asm_input_op (asm_stmt, i);
3595 tree op = TREE_VALUE (link);
3596 if ((REFERENCE_CLASS_P (op)
3597 || TREE_CODE (op) == ADDR_EXPR)
3598 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3599 changed = true;
3602 break;
3603 case GIMPLE_DEBUG:
3604 if (gimple_debug_bind_p (stmt))
3606 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3607 if (*val
3608 && (REFERENCE_CLASS_P (*val)
3609 || TREE_CODE (*val) == ADDR_EXPR)
3610 && maybe_canonicalize_mem_ref_addr (val))
3611 changed = true;
3613 break;
3614 default:;
3617 /* Dispatch to pattern-based folding. */
3618 if (!inplace
3619 || is_gimple_assign (stmt)
3620 || gimple_code (stmt) == GIMPLE_COND)
3622 gimple_seq seq = NULL;
3623 code_helper rcode;
3624 tree ops[3] = {};
3625 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
3626 valueize, valueize))
3628 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3629 changed = true;
3630 else
3631 gimple_seq_discard (seq);
3635 stmt = gsi_stmt (*gsi);
3637 /* Fold the main computation performed by the statement. */
3638 switch (gimple_code (stmt))
3640 case GIMPLE_ASSIGN:
3642 unsigned old_num_ops = gimple_num_ops (stmt);
3643 enum tree_code subcode = gimple_assign_rhs_code (stmt);
3644 tree lhs = gimple_assign_lhs (stmt);
3645 tree new_rhs;
3646 /* First canonicalize operand order. This avoids building new
3647 trees if this is the only thing fold would later do. */
3648 if ((commutative_tree_code (subcode)
3649 || commutative_ternary_tree_code (subcode))
3650 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
3651 gimple_assign_rhs2 (stmt), false))
3653 tree tem = gimple_assign_rhs1 (stmt);
3654 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
3655 gimple_assign_set_rhs2 (stmt, tem);
3656 changed = true;
3658 new_rhs = fold_gimple_assign (gsi);
3659 if (new_rhs
3660 && !useless_type_conversion_p (TREE_TYPE (lhs),
3661 TREE_TYPE (new_rhs)))
3662 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3663 if (new_rhs
3664 && (!inplace
3665 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3667 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3668 changed = true;
3670 break;
3673 case GIMPLE_COND:
3674 changed |= fold_gimple_cond (as_a <gcond *> (stmt));
3675 break;
3677 case GIMPLE_CALL:
3678 changed |= gimple_fold_call (gsi, inplace);
3679 break;
3681 case GIMPLE_ASM:
3682 /* Fold *& in asm operands. */
3684 gasm *asm_stmt = as_a <gasm *> (stmt);
3685 size_t noutputs;
3686 const char **oconstraints;
3687 const char *constraint;
3688 bool allows_mem, allows_reg;
3690 noutputs = gimple_asm_noutputs (asm_stmt);
3691 oconstraints = XALLOCAVEC (const char *, noutputs);
3693 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3695 tree link = gimple_asm_output_op (asm_stmt, i);
3696 tree op = TREE_VALUE (link);
3697 oconstraints[i]
3698 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3699 if (REFERENCE_CLASS_P (op)
3700 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3702 TREE_VALUE (link) = op;
3703 changed = true;
3706 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3708 tree link = gimple_asm_input_op (asm_stmt, i);
3709 tree op = TREE_VALUE (link);
3710 constraint
3711 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3712 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3713 oconstraints, &allows_mem, &allows_reg);
3714 if (REFERENCE_CLASS_P (op)
3715 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3716 != NULL_TREE)
3718 TREE_VALUE (link) = op;
3719 changed = true;
3723 break;
3725 case GIMPLE_DEBUG:
3726 if (gimple_debug_bind_p (stmt))
3728 tree val = gimple_debug_bind_get_value (stmt);
3729 if (val
3730 && REFERENCE_CLASS_P (val))
3732 tree tem = maybe_fold_reference (val, false);
3733 if (tem)
3735 gimple_debug_bind_set_value (stmt, tem);
3736 changed = true;
3739 else if (val
3740 && TREE_CODE (val) == ADDR_EXPR)
3742 tree ref = TREE_OPERAND (val, 0);
3743 tree tem = maybe_fold_reference (ref, false);
3744 if (tem)
3746 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3747 gimple_debug_bind_set_value (stmt, tem);
3748 changed = true;
3752 break;
3754 default:;
3757 stmt = gsi_stmt (*gsi);
3759 /* Fold *& on the lhs. */
3760 if (gimple_has_lhs (stmt))
3762 tree lhs = gimple_get_lhs (stmt);
3763 if (lhs && REFERENCE_CLASS_P (lhs))
3765 tree new_lhs = maybe_fold_reference (lhs, true);
3766 if (new_lhs)
3768 gimple_set_lhs (stmt, new_lhs);
3769 changed = true;
3774 return changed;
3777 /* Valueization callback that ends up not following SSA edges. */
3779 tree
3780 no_follow_ssa_edges (tree)
3782 return NULL_TREE;
3785 /* Valueization callback that ends up following single-use SSA edges only. */
3787 tree
3788 follow_single_use_edges (tree val)
3790 if (TREE_CODE (val) == SSA_NAME
3791 && !has_single_use (val))
3792 return NULL_TREE;
3793 return val;
3796 /* Fold the statement pointed to by GSI. In some cases, this function may
3797 replace the whole statement with a new one. Returns true iff folding
3798 makes any changes.
3799 The statement pointed to by GSI should be in valid gimple form but may
3800 be in an unfolded state, resulting for example from constant propagation,
3801 which can produce *&x = 0. */
3803 bool
3804 fold_stmt (gimple_stmt_iterator *gsi)
3806 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
3809 bool
3810 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
3812 return fold_stmt_1 (gsi, false, valueize);
3815 /* Perform the minimal folding on statement *GSI. Only operations like
3816 *&x created by constant propagation are handled. The statement cannot
3817 be replaced with a new one. Return true if the statement was
3818 changed, false otherwise.
3819 The statement *GSI should be in valid gimple form but may
3820 be in an unfolded state resulting from, for example, constant propagation,
3821 which can produce *&x = 0. */
3823 bool
3824 fold_stmt_inplace (gimple_stmt_iterator *gsi)
3826 gimple stmt = gsi_stmt (*gsi);
3827 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
3828 gcc_assert (gsi_stmt (*gsi) == stmt);
3829 return changed;
3832 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3833 if EXPR is null or we don't know how.
3834 If non-null, the result always has boolean type. */
3836 static tree
3837 canonicalize_bool (tree expr, bool invert)
3839 if (!expr)
3840 return NULL_TREE;
3841 else if (invert)
3843 if (integer_nonzerop (expr))
3844 return boolean_false_node;
3845 else if (integer_zerop (expr))
3846 return boolean_true_node;
3847 else if (TREE_CODE (expr) == SSA_NAME)
3848 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3849 build_int_cst (TREE_TYPE (expr), 0));
3850 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3851 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3852 boolean_type_node,
3853 TREE_OPERAND (expr, 0),
3854 TREE_OPERAND (expr, 1));
3855 else
3856 return NULL_TREE;
3858 else
3860 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3861 return expr;
3862 if (integer_nonzerop (expr))
3863 return boolean_true_node;
3864 else if (integer_zerop (expr))
3865 return boolean_false_node;
3866 else if (TREE_CODE (expr) == SSA_NAME)
3867 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3868 build_int_cst (TREE_TYPE (expr), 0));
3869 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3870 return fold_build2 (TREE_CODE (expr),
3871 boolean_type_node,
3872 TREE_OPERAND (expr, 0),
3873 TREE_OPERAND (expr, 1));
3874 else
3875 return NULL_TREE;
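/* Illustrative examples of the cases above (names are hypothetical):
     canonicalize_bool (5, false)     => boolean_true_node
     canonicalize_bool (5, true)      => boolean_false_node
     canonicalize_bool (x_1, true)    => x_1 == 0, with boolean type
     canonicalize_bool (a < b, true)  => a >= b, rebuilt with boolean type.  */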
3879 /* Check to see if a boolean expression EXPR is logically equivalent to the
3880 comparison (OP1 CODE OP2). Check for various identities involving
3881 SSA_NAMEs. */
3883 static bool
3884 same_bool_comparison_p (const_tree expr, enum tree_code code,
3885 const_tree op1, const_tree op2)
3887 gimple s;
3889 /* The obvious case. */
3890 if (TREE_CODE (expr) == code
3891 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
3892 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
3893 return true;
3895 /* Check for comparing (name, name != 0) and the case where expr
3896 is an SSA_NAME with a definition matching the comparison. */
3897 if (TREE_CODE (expr) == SSA_NAME
3898 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3900 if (operand_equal_p (expr, op1, 0))
3901 return ((code == NE_EXPR && integer_zerop (op2))
3902 || (code == EQ_EXPR && integer_nonzerop (op2)));
3903 s = SSA_NAME_DEF_STMT (expr);
3904 if (is_gimple_assign (s)
3905 && gimple_assign_rhs_code (s) == code
3906 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
3907 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
3908 return true;
3911 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3912 of name is a comparison, recurse. */
3913 if (TREE_CODE (op1) == SSA_NAME
3914 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
3916 s = SSA_NAME_DEF_STMT (op1);
3917 if (is_gimple_assign (s)
3918 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
3920 enum tree_code c = gimple_assign_rhs_code (s);
3921 if ((c == NE_EXPR && integer_zerop (op2))
3922 || (c == EQ_EXPR && integer_nonzerop (op2)))
3923 return same_bool_comparison_p (expr, c,
3924 gimple_assign_rhs1 (s),
3925 gimple_assign_rhs2 (s));
3926 if ((c == EQ_EXPR && integer_zerop (op2))
3927 || (c == NE_EXPR && integer_nonzerop (op2)))
3928 return same_bool_comparison_p (expr,
3929 invert_tree_comparison (c, false),
3930 gimple_assign_rhs1 (s),
3931 gimple_assign_rhs2 (s));
3934 return false;
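/* For instance, if t_1 is a boolean SSA name, then
   same_bool_comparison_p (t_1, NE_EXPR, t_1, integer_zero_node) is
   true, since the test (t_1 != 0) is the same as t_1 itself; the
   names here are illustrative only.  */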
3937 /* Check to see if two boolean expressions OP1 and OP2 are logically
3938 equivalent. */
3940 static bool
3941 same_bool_result_p (const_tree op1, const_tree op2)
3943 /* Simple cases first. */
3944 if (operand_equal_p (op1, op2, 0))
3945 return true;
3947 /* Check the cases where at least one of the operands is a comparison.
3948 These are a bit smarter than operand_equal_p in that they apply some
3949 identities on SSA_NAMEs. */
3950 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
3951 && same_bool_comparison_p (op1, TREE_CODE (op2),
3952 TREE_OPERAND (op2, 0),
3953 TREE_OPERAND (op2, 1)))
3954 return true;
3955 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
3956 && same_bool_comparison_p (op2, TREE_CODE (op1),
3957 TREE_OPERAND (op1, 0),
3958 TREE_OPERAND (op1, 1)))
3959 return true;
3961 /* Default case. */
3962 return false;
3965 /* Forward declarations for some mutually recursive functions. */
3967 static tree
3968 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3969 enum tree_code code2, tree op2a, tree op2b);
3970 static tree
3971 and_var_with_comparison (tree var, bool invert,
3972 enum tree_code code2, tree op2a, tree op2b);
3973 static tree
3974 and_var_with_comparison_1 (gimple stmt,
3975 enum tree_code code2, tree op2a, tree op2b);
3976 static tree
3977 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3978 enum tree_code code2, tree op2a, tree op2b);
3979 static tree
3980 or_var_with_comparison (tree var, bool invert,
3981 enum tree_code code2, tree op2a, tree op2b);
3982 static tree
3983 or_var_with_comparison_1 (gimple stmt,
3984 enum tree_code code2, tree op2a, tree op2b);
3986 /* Helper function for and_comparisons_1: try to simplify the AND of the
3987 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3988 If INVERT is true, invert the value of the VAR before doing the AND.
3989 Return NULL_TREE if we can't simplify this to a single expression. */
3991 static tree
3992 and_var_with_comparison (tree var, bool invert,
3993 enum tree_code code2, tree op2a, tree op2b)
3995 tree t;
3996 gimple stmt = SSA_NAME_DEF_STMT (var);
3998 /* We can only deal with variables whose definitions are assignments. */
3999 if (!is_gimple_assign (stmt))
4000 return NULL_TREE;
4002 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4003 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4004 Then we only have to consider the simpler non-inverted cases. */
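/* For example, with INVERT set and code2 == LT_EXPR, computing
   !x_2 AND (a < b) proceeds as x_2 OR (a >= b); the final
   canonicalize_bool call below then undoes the negation. The names
   x_2, a, b are illustrative only.  */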
4005 if (invert)
4006 t = or_var_with_comparison_1 (stmt,
4007 invert_tree_comparison (code2, false),
4008 op2a, op2b);
4009 else
4010 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4011 return canonicalize_bool (t, invert);
4014 /* Try to simplify the AND of the ssa variable defined by the assignment
4015 STMT with the comparison specified by (OP2A CODE2 OP2B).
4016 Return NULL_TREE if we can't simplify this to a single expression. */
4018 static tree
4019 and_var_with_comparison_1 (gimple stmt,
4020 enum tree_code code2, tree op2a, tree op2b)
4022 tree var = gimple_assign_lhs (stmt);
4023 tree true_test_var = NULL_TREE;
4024 tree false_test_var = NULL_TREE;
4025 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4027 /* Check for identities like (var AND (var == 0)) => false. */
4028 if (TREE_CODE (op2a) == SSA_NAME
4029 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4031 if ((code2 == NE_EXPR && integer_zerop (op2b))
4032 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4034 true_test_var = op2a;
4035 if (var == true_test_var)
4036 return var;
4038 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4039 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4041 false_test_var = op2a;
4042 if (var == false_test_var)
4043 return boolean_false_node;
4047 /* If the definition is a comparison, recurse on it. */
4048 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4050 tree t = and_comparisons_1 (innercode,
4051 gimple_assign_rhs1 (stmt),
4052 gimple_assign_rhs2 (stmt),
4053 code2,
4054 op2a,
4055 op2b);
4056 if (t)
4057 return t;
4060 /* If the definition is an AND or OR expression, we may be able to
4061 simplify by reassociating. */
4062 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4063 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4065 tree inner1 = gimple_assign_rhs1 (stmt);
4066 tree inner2 = gimple_assign_rhs2 (stmt);
4067 gimple s;
4068 tree t;
4069 tree partial = NULL_TREE;
4070 bool is_and = (innercode == BIT_AND_EXPR);
4072 /* Check for boolean identities that don't require recursive examination
4073 of inner1/inner2:
4074 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4075 inner1 AND (inner1 OR inner2) => inner1
4076 !inner1 AND (inner1 AND inner2) => false
4077 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4078 Likewise for similar cases involving inner2. */
4079 if (inner1 == true_test_var)
4080 return (is_and ? var : inner1);
4081 else if (inner2 == true_test_var)
4082 return (is_and ? var : inner2);
4083 else if (inner1 == false_test_var)
4084 return (is_and
4085 ? boolean_false_node
4086 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4087 else if (inner2 == false_test_var)
4088 return (is_and
4089 ? boolean_false_node
4090 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4092 /* Next, redistribute/reassociate the AND across the inner tests.
4093 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4094 if (TREE_CODE (inner1) == SSA_NAME
4095 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4096 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4097 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4098 gimple_assign_rhs1 (s),
4099 gimple_assign_rhs2 (s),
4100 code2, op2a, op2b)))
4102 /* Handle the AND case, where we are reassociating:
4103 (inner1 AND inner2) AND (op2a code2 op2b)
4104 => (t AND inner2)
4105 If the partial result t is a constant, we win. Otherwise
4106 continue on to try reassociating with the other inner test. */
4107 if (is_and)
4109 if (integer_onep (t))
4110 return inner2;
4111 else if (integer_zerop (t))
4112 return boolean_false_node;
4115 /* Handle the OR case, where we are redistributing:
4116 (inner1 OR inner2) AND (op2a code2 op2b)
4117 => (t OR (inner2 AND (op2a code2 op2b))) */
4118 else if (integer_onep (t))
4119 return boolean_true_node;
4121 /* Save partial result for later. */
4122 partial = t;
4125 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4126 if (TREE_CODE (inner2) == SSA_NAME
4127 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4128 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4129 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4130 gimple_assign_rhs1 (s),
4131 gimple_assign_rhs2 (s),
4132 code2, op2a, op2b)))
4134 /* Handle the AND case, where we are reassociating:
4135 (inner1 AND inner2) AND (op2a code2 op2b)
4136 => (inner1 AND t) */
4137 if (is_and)
4139 if (integer_onep (t))
4140 return inner1;
4141 else if (integer_zerop (t))
4142 return boolean_false_node;
4143 /* If both are the same, we can apply the identity
4144 (x AND x) == x. */
4145 else if (partial && same_bool_result_p (t, partial))
4146 return t;
4149 /* Handle the OR case, where we are redistributing:
4150 (inner1 OR inner2) AND (op2a code2 op2b)
4151 => (t OR (inner1 AND (op2a code2 op2b)))
4152 => (t OR partial) */
4153 else
4155 if (integer_onep (t))
4156 return boolean_true_node;
4157 else if (partial)
4159 /* We already got a simplification for the other
4160 operand to the redistributed OR expression. The
4161 interesting case is when at least one is false.
4162 Or, if both are the same, we can apply the identity
4163 (x OR x) == x. */
4164 if (integer_zerop (partial))
4165 return t;
4166 else if (integer_zerop (t))
4167 return partial;
4168 else if (same_bool_result_p (t, partial))
4169 return t;
4174 return NULL_TREE;
4177 /* Try to simplify the AND of two comparisons defined by
4178 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4179 If this can be done without constructing an intermediate value,
4180 return the resulting tree; otherwise NULL_TREE is returned.
4181 This function is deliberately asymmetric as it recurses on SSA_DEFs
4182 in the first comparison but not the second. */
4184 static tree
4185 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4186 enum tree_code code2, tree op2a, tree op2b)
4188 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4190 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4191 if (operand_equal_p (op1a, op2a, 0)
4192 && operand_equal_p (op1b, op2b, 0))
4194 /* Result will be either NULL_TREE, or a combined comparison. */
4195 tree t = combine_comparisons (UNKNOWN_LOCATION,
4196 TRUTH_ANDIF_EXPR, code1, code2,
4197 truth_type, op1a, op1b);
4198 if (t)
4199 return t;
4202 /* Likewise the swapped case of the above. */
4203 if (operand_equal_p (op1a, op2b, 0)
4204 && operand_equal_p (op1b, op2a, 0))
4206 /* Result will be either NULL_TREE, or a combined comparison. */
4207 tree t = combine_comparisons (UNKNOWN_LOCATION,
4208 TRUTH_ANDIF_EXPR, code1,
4209 swap_tree_comparison (code2),
4210 truth_type, op1a, op1b);
4211 if (t)
4212 return t;
4215 /* If both comparisons are of the same value against constants, we might
4216 be able to merge them. */
4217 if (operand_equal_p (op1a, op2a, 0)
4218 && TREE_CODE (op1b) == INTEGER_CST
4219 && TREE_CODE (op2b) == INTEGER_CST)
4221 int cmp = tree_int_cst_compare (op1b, op2b);
4223 /* If we have (op1a == op1b), we should either be able to
4224 return that or FALSE, depending on whether the constant op1b
4225 also satisfies the other comparison against op2b. */
4226 if (code1 == EQ_EXPR)
4228 bool done = true;
4229 bool val;
4230 switch (code2)
4232 case EQ_EXPR: val = (cmp == 0); break;
4233 case NE_EXPR: val = (cmp != 0); break;
4234 case LT_EXPR: val = (cmp < 0); break;
4235 case GT_EXPR: val = (cmp > 0); break;
4236 case LE_EXPR: val = (cmp <= 0); break;
4237 case GE_EXPR: val = (cmp >= 0); break;
4238 default: done = false;
4240 if (done)
4242 if (val)
4243 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4244 else
4245 return boolean_false_node;
4248 /* Likewise if the second comparison is an == comparison. */
4249 else if (code2 == EQ_EXPR)
4251 bool done = true;
4252 bool val;
4253 switch (code1)
4255 case EQ_EXPR: val = (cmp == 0); break;
4256 case NE_EXPR: val = (cmp != 0); break;
4257 case LT_EXPR: val = (cmp > 0); break;
4258 case GT_EXPR: val = (cmp < 0); break;
4259 case LE_EXPR: val = (cmp >= 0); break;
4260 case GE_EXPR: val = (cmp <= 0); break;
4261 default: done = false;
4263 if (done)
4265 if (val)
4266 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4267 else
4268 return boolean_false_node;
4272 /* Same business with inequality tests. */
4273 else if (code1 == NE_EXPR)
4275 bool val;
4276 switch (code2)
4278 case EQ_EXPR: val = (cmp != 0); break;
4279 case NE_EXPR: val = (cmp == 0); break;
4280 case LT_EXPR: val = (cmp >= 0); break;
4281 case GT_EXPR: val = (cmp <= 0); break;
4282 case LE_EXPR: val = (cmp > 0); break;
4283 case GE_EXPR: val = (cmp < 0); break;
4284 default:
4285 val = false;
4287 if (val)
4288 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4290 else if (code2 == NE_EXPR)
4292 bool val;
4293 switch (code1)
4295 case EQ_EXPR: val = (cmp == 0); break;
4296 case NE_EXPR: val = (cmp != 0); break;
4297 case LT_EXPR: val = (cmp <= 0); break;
4298 case GT_EXPR: val = (cmp >= 0); break;
4299 case LE_EXPR: val = (cmp < 0); break;
4300 case GE_EXPR: val = (cmp > 0); break;
4301 default:
4302 val = false;
4304 if (val)
4305 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4308 /* Choose the more restrictive of two < or <= comparisons. */
4309 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4310 && (code2 == LT_EXPR || code2 == LE_EXPR))
4312 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4313 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4314 else
4315 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4318 /* Likewise choose the more restrictive of two > or >= comparisons. */
4319 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4320 && (code2 == GT_EXPR || code2 == GE_EXPR))
4322 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4323 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4324 else
4325 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4328 /* Check for singleton ranges. */
4329 else if (cmp == 0
4330 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4331 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4332 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4334 /* Check for disjoint ranges. */
4335 else if (cmp <= 0
4336 && (code1 == LT_EXPR || code1 == LE_EXPR)
4337 && (code2 == GT_EXPR || code2 == GE_EXPR))
4338 return boolean_false_node;
4339 else if (cmp >= 0
4340 && (code1 == GT_EXPR || code1 == GE_EXPR)
4341 && (code2 == LT_EXPR || code2 == LE_EXPR))
4342 return boolean_false_node;
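/* Illustrative examples of the constant merging above:
     (x == 3) AND (x < 7)   => x == 3  (the constant satisfies code2)
     (x < 3)  AND (x < 7)   => x < 3   (more restrictive bound)
     (x <= 5) AND (x >= 5)  => x == 5  (singleton range)
     (x < 2)  AND (x > 4)   => false   (disjoint ranges).  */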
4345 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4346 NAME's definition is a truth value. See if there are any simplifications
4347 that can be done against the NAME's definition. */
4348 if (TREE_CODE (op1a) == SSA_NAME
4349 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4350 && (integer_zerop (op1b) || integer_onep (op1b)))
4352 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4353 || (code1 == NE_EXPR && integer_onep (op1b)));
4354 gimple stmt = SSA_NAME_DEF_STMT (op1a);
4355 switch (gimple_code (stmt))
4357 case GIMPLE_ASSIGN:
4358 /* Try to simplify by copy-propagating the definition. */
4359 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4361 case GIMPLE_PHI:
4362 /* If every argument to the PHI produces the same result when
4363 ANDed with the second comparison, we win.
4364 Do not do this unless the type is bool since we need a bool
4365 result here anyway. */
4366 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4368 tree result = NULL_TREE;
4369 unsigned i;
4370 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4372 tree arg = gimple_phi_arg_def (stmt, i);
4374 /* If this PHI has itself as an argument, ignore it.
4375 If all the other args produce the same result,
4376 we're still OK. */
4377 if (arg == gimple_phi_result (stmt))
4378 continue;
4379 else if (TREE_CODE (arg) == INTEGER_CST)
4381 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4383 if (!result)
4384 result = boolean_false_node;
4385 else if (!integer_zerop (result))
4386 return NULL_TREE;
4388 else if (!result)
4389 result = fold_build2 (code2, boolean_type_node,
4390 op2a, op2b);
4391 else if (!same_bool_comparison_p (result,
4392 code2, op2a, op2b))
4393 return NULL_TREE;
4395 else if (TREE_CODE (arg) == SSA_NAME
4396 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4398 tree temp;
4399 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
4400 /* In simple cases we can look through PHI nodes,
4401 but we have to be careful with loops.
4402 See PR49073. */
4403 if (! dom_info_available_p (CDI_DOMINATORS)
4404 || gimple_bb (def_stmt) == gimple_bb (stmt)
4405 || dominated_by_p (CDI_DOMINATORS,
4406 gimple_bb (def_stmt),
4407 gimple_bb (stmt)))
4408 return NULL_TREE;
4409 temp = and_var_with_comparison (arg, invert, code2,
4410 op2a, op2b);
4411 if (!temp)
4412 return NULL_TREE;
4413 else if (!result)
4414 result = temp;
4415 else if (!same_bool_result_p (result, temp))
4416 return NULL_TREE;
4418 else
4419 return NULL_TREE;
4421 return result;
4424 default:
4425 break;
4428 return NULL_TREE;
4431 /* Try to simplify the AND of two comparisons, specified by
4432 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4433 If this can be simplified to a single expression (without requiring
4434 introducing more SSA variables to hold intermediate values),
4435 return the resulting tree. Otherwise return NULL_TREE.
4436 If the result expression is non-null, it has boolean type. */
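/* Note: and_comparisons_1 follows SSA defs only through its first
   comparison, so this function tries both argument orders; e.g. when
   only OP2A has a foldable defining statement, the second call is the
   one that succeeds.  */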
4438 tree
4439 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4440 enum tree_code code2, tree op2a, tree op2b)
4442 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4443 if (t)
4444 return t;
4445 else
4446 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4449 /* Helper function for or_comparisons_1: try to simplify the OR of the
4450 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4451 If INVERT is true, invert the value of VAR before doing the OR.
4452 Return NULL_TREE if we can't simplify this to a single expression. */
4454 static tree
4455 or_var_with_comparison (tree var, bool invert,
4456 enum tree_code code2, tree op2a, tree op2b)
4458 tree t;
4459 gimple stmt = SSA_NAME_DEF_STMT (var);
4461 /* We can only deal with variables whose definitions are assignments. */
4462 if (!is_gimple_assign (stmt))
4463 return NULL_TREE;
4465 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4466 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4467 Then we only have to consider the simpler non-inverted cases. */
4468 if (invert)
4469 t = and_var_with_comparison_1 (stmt,
4470 invert_tree_comparison (code2, false),
4471 op2a, op2b);
4472 else
4473 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4474 return canonicalize_bool (t, invert);
4477 /* Try to simplify the OR of the ssa variable defined by the assignment
4478 STMT with the comparison specified by (OP2A CODE2 OP2B).
4479 Return NULL_TREE if we can't simplify this to a single expression. */
4481 static tree
4482 or_var_with_comparison_1 (gimple stmt,
4483 enum tree_code code2, tree op2a, tree op2b)
4485 tree var = gimple_assign_lhs (stmt);
4486 tree true_test_var = NULL_TREE;
4487 tree false_test_var = NULL_TREE;
4488 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4490 /* Check for identities like (var OR (var != 0)) => true. */
4491 if (TREE_CODE (op2a) == SSA_NAME
4492 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4494 if ((code2 == NE_EXPR && integer_zerop (op2b))
4495 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4497 true_test_var = op2a;
4498 if (var == true_test_var)
4499 return var;
4501 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4502 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4504 false_test_var = op2a;
4505 if (var == false_test_var)
4506 return boolean_true_node;
4510 /* If the definition is a comparison, recurse on it. */
4511 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4513 tree t = or_comparisons_1 (innercode,
4514 gimple_assign_rhs1 (stmt),
4515 gimple_assign_rhs2 (stmt),
4516 code2,
4517 op2a,
4518 op2b);
4519 if (t)
4520 return t;
4523 /* If the definition is an AND or OR expression, we may be able to
4524 simplify by reassociating. */
4525 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4526 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4528 tree inner1 = gimple_assign_rhs1 (stmt);
4529 tree inner2 = gimple_assign_rhs2 (stmt);
4530 gimple s;
4531 tree t;
4532 tree partial = NULL_TREE;
4533 bool is_or = (innercode == BIT_IOR_EXPR);
4535 /* Check for boolean identities that don't require recursive examination
4536 of inner1/inner2:
4537 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4538 inner1 OR (inner1 AND inner2) => inner1
4539 !inner1 OR (inner1 OR inner2) => true
4540 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2 */
4542 if (inner1 == true_test_var)
4543 return (is_or ? var : inner1);
4544 else if (inner2 == true_test_var)
4545 return (is_or ? var : inner2);
4546 else if (inner1 == false_test_var)
4547 return (is_or
4548 ? boolean_true_node
4549 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4550 else if (inner2 == false_test_var)
4551 return (is_or
4552 ? boolean_true_node
4553 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4555 /* Next, redistribute/reassociate the OR across the inner tests.
4556 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4557 if (TREE_CODE (inner1) == SSA_NAME
4558 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4559 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4560 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4561 gimple_assign_rhs1 (s),
4562 gimple_assign_rhs2 (s),
4563 code2, op2a, op2b)))
4565 /* Handle the OR case, where we are reassociating:
4566 (inner1 OR inner2) OR (op2a code2 op2b)
4567 => (t OR inner2)
4568 If the partial result t is a constant, we win. Otherwise
4569 continue on to try reassociating with the other inner test. */
4570 if (is_or)
4572 if (integer_onep (t))
4573 return boolean_true_node;
4574 else if (integer_zerop (t))
4575 return inner2;
4578 /* Handle the AND case, where we are redistributing:
4579 (inner1 AND inner2) OR (op2a code2 op2b)
4580 => (t AND (inner2 OR (op2a code op2b))) */
4581 else if (integer_zerop (t))
4582 return boolean_false_node;
4584 /* Save partial result for later. */
4585 partial = t;
4588 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4589 if (TREE_CODE (inner2) == SSA_NAME
4590 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4591 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4592 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4593 gimple_assign_rhs1 (s),
4594 gimple_assign_rhs2 (s),
4595 code2, op2a, op2b)))
4597 /* Handle the OR case, where we are reassociating:
4598 (inner1 OR inner2) OR (op2a code2 op2b)
4599 => (inner1 OR t)
4600 => (t OR partial) */
4601 if (is_or)
4603 if (integer_zerop (t))
4604 return inner1;
4605 else if (integer_onep (t))
4606 return boolean_true_node;
4607 /* If both are the same, we can apply the identity
4608 (x OR x) == x. */
4609 else if (partial && same_bool_result_p (t, partial))
4610 return t;
4613 /* Handle the AND case, where we are redistributing:
4614 (inner1 AND inner2) OR (op2a code2 op2b)
4615 => (t AND (inner1 OR (op2a code2 op2b)))
4616 => (t AND partial) */
4617 else
4619 if (integer_zerop (t))
4620 return boolean_false_node;
4621 else if (partial)
4623 /* We already got a simplification for the other
4624 operand to the redistributed AND expression. The
4625 interesting case is when at least one is true.
4626 Or, if both are the same, we can apply the identity
4627 (x AND x) == x. */
4628 if (integer_onep (partial))
4629 return t;
4630 else if (integer_onep (t))
4631 return partial;
4632 else if (same_bool_result_p (t, partial))
4633 return t;
4638 return NULL_TREE;
4641 /* Try to simplify the OR of two comparisons defined by
4642 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4643 If this can be done without constructing an intermediate value,
4644 return the resulting tree; otherwise NULL_TREE is returned.
4645 This function is deliberately asymmetric as it recurses on SSA_DEFs
4646 in the first comparison but not the second. */
4648 static tree
4649 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4650 enum tree_code code2, tree op2a, tree op2b)
4652 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4654 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4655 if (operand_equal_p (op1a, op2a, 0)
4656 && operand_equal_p (op1b, op2b, 0))
4658 /* Result will be either NULL_TREE, or a combined comparison. */
4659 tree t = combine_comparisons (UNKNOWN_LOCATION,
4660 TRUTH_ORIF_EXPR, code1, code2,
4661 truth_type, op1a, op1b);
4662 if (t)
4663 return t;
4666 /* Likewise the swapped case of the above. */
4667 if (operand_equal_p (op1a, op2b, 0)
4668 && operand_equal_p (op1b, op2a, 0))
4670 /* Result will be either NULL_TREE, or a combined comparison. */
4671 tree t = combine_comparisons (UNKNOWN_LOCATION,
4672 TRUTH_ORIF_EXPR, code1,
4673 swap_tree_comparison (code2),
4674 truth_type, op1a, op1b);
4675 if (t)
4676 return t;
4679 /* If both comparisons are of the same value against constants, we might
4680 be able to merge them. */
4681 if (operand_equal_p (op1a, op2a, 0)
4682 && TREE_CODE (op1b) == INTEGER_CST
4683 && TREE_CODE (op2b) == INTEGER_CST)
4685 int cmp = tree_int_cst_compare (op1b, op2b);
4687 /* If we have (op1a != op1b), we should either be able to
4688 return that or TRUE, depending on whether the constant op1b
4689 also satisfies the other comparison against op2b. */
4690 if (code1 == NE_EXPR)
4692 bool done = true;
4693 bool val;
4694 switch (code2)
4696 case EQ_EXPR: val = (cmp == 0); break;
4697 case NE_EXPR: val = (cmp != 0); break;
4698 case LT_EXPR: val = (cmp < 0); break;
4699 case GT_EXPR: val = (cmp > 0); break;
4700 case LE_EXPR: val = (cmp <= 0); break;
4701 case GE_EXPR: val = (cmp >= 0); break;
4702 default: done = false;
4704 if (done)
4706 if (val)
4707 return boolean_true_node;
4708 else
4709 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4712 /* Likewise if the second comparison is a != comparison. */
4713 else if (code2 == NE_EXPR)
4715 bool done = true;
4716 bool val;
4717 switch (code1)
4719 case EQ_EXPR: val = (cmp == 0); break;
4720 case NE_EXPR: val = (cmp != 0); break;
4721 case LT_EXPR: val = (cmp > 0); break;
4722 case GT_EXPR: val = (cmp < 0); break;
4723 case LE_EXPR: val = (cmp >= 0); break;
4724 case GE_EXPR: val = (cmp <= 0); break;
4725 default: done = false;
4727 if (done)
4729 if (val)
4730 return boolean_true_node;
4731 else
4732 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4736 /* See if an equality test is redundant with the other comparison. */
4737 else if (code1 == EQ_EXPR)
4739 bool val;
4740 switch (code2)
4742 case EQ_EXPR: val = (cmp == 0); break;
4743 case NE_EXPR: val = (cmp != 0); break;
4744 case LT_EXPR: val = (cmp < 0); break;
4745 case GT_EXPR: val = (cmp > 0); break;
4746 case LE_EXPR: val = (cmp <= 0); break;
4747 case GE_EXPR: val = (cmp >= 0); break;
4748 default:
4749 val = false;
4751 if (val)
4752 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4754 else if (code2 == EQ_EXPR)
4756 bool val;
4757 switch (code1)
4759 case EQ_EXPR: val = (cmp == 0); break;
4760 case NE_EXPR: val = (cmp != 0); break;
4761 case LT_EXPR: val = (cmp > 0); break;
4762 case GT_EXPR: val = (cmp < 0); break;
4763 case LE_EXPR: val = (cmp >= 0); break;
4764 case GE_EXPR: val = (cmp <= 0); break;
4765 default:
4766 val = false;
4768 if (val)
4769 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4772 /* Choose the less restrictive of two < or <= comparisons. */
4773 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4774 && (code2 == LT_EXPR || code2 == LE_EXPR))
4776 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4777 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4778 else
4779 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4782 /* Likewise choose the less restrictive of two > or >= comparisons. */
4783 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4784 && (code2 == GT_EXPR || code2 == GE_EXPR))
4786 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4787 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4788 else
4789 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4792 /* Check for singleton ranges. */
4793 else if (cmp == 0
4794 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4795 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4796 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4798 /* Check for less/greater pairs that don't restrict the range at all. */
4799 else if (cmp >= 0
4800 && (code1 == LT_EXPR || code1 == LE_EXPR)
4801 && (code2 == GT_EXPR || code2 == GE_EXPR))
4802 return boolean_true_node;
4803 else if (cmp <= 0
4804 && (code1 == GT_EXPR || code1 == GE_EXPR)
4805 && (code2 == LT_EXPR || code2 == LE_EXPR))
4806 return boolean_true_node;
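/* Illustrative examples of the constant merging above:
     (x != 3) OR (x < 7)   => true    (the constant satisfies code2)
     (x < 3)  OR (x < 7)   => x < 7   (less restrictive bound)
     (x < 5)  OR (x > 5)   => x != 5  (singleton range check)
     (x < 7)  OR (x > 2)   => true    (the ranges cover everything).  */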
4809 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4810 NAME's definition is a truth value. See if there are any simplifications
4811 that can be done against the NAME's definition. */
4812 if (TREE_CODE (op1a) == SSA_NAME
4813 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4814 && (integer_zerop (op1b) || integer_onep (op1b)))
4816 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4817 || (code1 == NE_EXPR && integer_onep (op1b)));
4818 gimple stmt = SSA_NAME_DEF_STMT (op1a);
4819 switch (gimple_code (stmt))
4821 case GIMPLE_ASSIGN:
4822 /* Try to simplify by copy-propagating the definition. */
4823 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4825 case GIMPLE_PHI:
4826 /* If every argument to the PHI produces the same result when
4827 ORed with the second comparison, we win.
4828 Do not do this unless the type is bool since we need a bool
4829 result here anyway. */
4830 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4832 tree result = NULL_TREE;
4833 unsigned i;
4834 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4836 tree arg = gimple_phi_arg_def (stmt, i);
4838 /* If this PHI has itself as an argument, ignore it.
4839 If all the other args produce the same result,
4840 we're still OK. */
4841 if (arg == gimple_phi_result (stmt))
4842 continue;
4843 else if (TREE_CODE (arg) == INTEGER_CST)
4845 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4847 if (!result)
4848 result = boolean_true_node;
4849 else if (!integer_onep (result))
4850 return NULL_TREE;
4852 else if (!result)
4853 result = fold_build2 (code2, boolean_type_node,
4854 op2a, op2b);
4855 else if (!same_bool_comparison_p (result,
4856 code2, op2a, op2b))
4857 return NULL_TREE;
4859 else if (TREE_CODE (arg) == SSA_NAME
4860 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4862 tree temp;
4863 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
4864 /* In simple cases we can look through PHI nodes,
4865 but we have to be careful with loops.
4866 See PR49073. */
4867 if (! dom_info_available_p (CDI_DOMINATORS)
4868 || gimple_bb (def_stmt) == gimple_bb (stmt)
4869 || dominated_by_p (CDI_DOMINATORS,
4870 gimple_bb (def_stmt),
4871 gimple_bb (stmt)))
4872 return NULL_TREE;
4873 temp = or_var_with_comparison (arg, invert, code2,
4874 op2a, op2b);
4875 if (!temp)
4876 return NULL_TREE;
4877 else if (!result)
4878 result = temp;
4879 else if (!same_bool_result_p (result, temp))
4880 return NULL_TREE;
4882 else
4883 return NULL_TREE;
4885 return result;
4888 default:
4889 break;
4892 return NULL_TREE;
4895 /* Try to simplify the OR of two comparisons, specified by
4896 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4897 If this can be simplified to a single expression (without requiring
4898 introducing more SSA variables to hold intermediate values),
4899 return the resulting tree. Otherwise return NULL_TREE.
4900 If the result expression is non-null, it has boolean type. */
4902 tree
4903 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4904 enum tree_code code2, tree op2a, tree op2b)
4906 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4907 if (t)
4908 return t;
4909 else
4910 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4914 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4916 Either NULL_TREE, a simplified but non-constant expression, or a constant
4917 is returned.
4919 ??? This should go into a gimple-fold-inline.h file to be eventually
4920 privatized with the single valueize function used in the various TUs
4921 to avoid the indirect function call overhead. */
4923 tree
4924 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
4925 tree (*gvalueize) (tree))
4927 code_helper rcode;
4928 tree ops[3] = {};
4929 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4930 edges if there are intermediate VARYING defs. For this reason
4931 do not follow SSA edges here even though SCCVN can technically
4932 just deal fine with that. */
4933 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize)
4934 && rcode.is_tree_code ()
4935 && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
4936 || ((tree_code) rcode) == ADDR_EXPR)
4937 && is_gimple_val (ops[0]))
4939 tree res = ops[0];
4940 if (dump_file && dump_flags & TDF_DETAILS)
4942 fprintf (dump_file, "Match-and-simplified ");
4943 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
4944 fprintf (dump_file, " to ");
4945 print_generic_expr (dump_file, res, 0);
4946 fprintf (dump_file, "\n");
4948 return res;
4951 location_t loc = gimple_location (stmt);
4952 switch (gimple_code (stmt))
4954 case GIMPLE_ASSIGN:
4956 enum tree_code subcode = gimple_assign_rhs_code (stmt);
4958 switch (get_gimple_rhs_class (subcode))
4960 case GIMPLE_SINGLE_RHS:
4962 tree rhs = gimple_assign_rhs1 (stmt);
4963 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
4965 if (TREE_CODE (rhs) == SSA_NAME)
4967 /* If the RHS is an SSA_NAME, return its known constant value,
4968 if any. */
4969 return (*valueize) (rhs);
4971 /* Handle propagating invariant addresses into address
4972 operations. */
4973 else if (TREE_CODE (rhs) == ADDR_EXPR
4974 && !is_gimple_min_invariant (rhs))
4976 HOST_WIDE_INT offset = 0;
4977 tree base;
4978 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
4979 &offset,
4980 valueize);
4981 if (base
4982 && (CONSTANT_CLASS_P (base)
4983 || decl_address_invariant_p (base)))
4984 return build_invariant_address (TREE_TYPE (rhs),
4985 base, offset);
4987 else if (TREE_CODE (rhs) == CONSTRUCTOR
4988 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
4989 && (CONSTRUCTOR_NELTS (rhs)
4990 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
4992 unsigned i;
4993 tree val, *vec;
4995 vec = XALLOCAVEC (tree,
4996 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
4997 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
4999 val = (*valueize) (val);
5000 if (TREE_CODE (val) == INTEGER_CST
5001 || TREE_CODE (val) == REAL_CST
5002 || TREE_CODE (val) == FIXED_CST)
5003 vec[i] = val;
5004 else
5005 return NULL_TREE;
5008 return build_vector (TREE_TYPE (rhs), vec);
5010 if (subcode == OBJ_TYPE_REF)
5012 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5013 /* If callee is constant, we can fold away the wrapper. */
5014 if (is_gimple_min_invariant (val))
5015 return val;
5018 if (kind == tcc_reference)
5020 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5021 || TREE_CODE (rhs) == REALPART_EXPR
5022 || TREE_CODE (rhs) == IMAGPART_EXPR)
5023 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5025 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5026 return fold_unary_loc (EXPR_LOCATION (rhs),
5027 TREE_CODE (rhs),
5028 TREE_TYPE (rhs), val);
5030 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5031 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5033 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5034 return fold_ternary_loc (EXPR_LOCATION (rhs),
5035 TREE_CODE (rhs),
5036 TREE_TYPE (rhs), val,
5037 TREE_OPERAND (rhs, 1),
5038 TREE_OPERAND (rhs, 2));
5040 else if (TREE_CODE (rhs) == MEM_REF
5041 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5043 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5044 if (TREE_CODE (val) == ADDR_EXPR
5045 && is_gimple_min_invariant (val))
5047 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5048 unshare_expr (val),
5049 TREE_OPERAND (rhs, 1));
5050 if (tem)
5051 rhs = tem;
5054 return fold_const_aggregate_ref_1 (rhs, valueize);
5056 else if (kind == tcc_declaration)
5057 return get_symbol_constant_value (rhs);
5058 return rhs;
5061 case GIMPLE_UNARY_RHS:
5062 return NULL_TREE;
5064 case GIMPLE_BINARY_RHS:
5066 /* Handle binary operators that can appear in GIMPLE form. */
5067 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5068 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5070 /* Translate &x + CST into an invariant form suitable for
5071 further propagation. */
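/* E.g. &a + 4 becomes roughly &MEM[&a + 4B] in dump syntax: an
   invariant address that later propagation can consume directly.
   The name a is illustrative only.  */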
5072 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
5073 && TREE_CODE (op0) == ADDR_EXPR
5074 && TREE_CODE (op1) == INTEGER_CST)
5076 tree off = fold_convert (ptr_type_node, op1);
5077 return build_fold_addr_expr_loc
5078 (loc,
5079 fold_build2 (MEM_REF,
5080 TREE_TYPE (TREE_TYPE (op0)),
5081 unshare_expr (op0), off));
5084 return fold_binary_loc (loc, subcode,
5085 gimple_expr_type (stmt), op0, op1);
5088 case GIMPLE_TERNARY_RHS:
5090 /* Handle ternary operators that can appear in GIMPLE form. */
5091 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5092 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5093 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5095 /* Fold embedded expressions in ternary codes. */
5096 if ((subcode == COND_EXPR
5097 || subcode == VEC_COND_EXPR)
5098 && COMPARISON_CLASS_P (op0))
5100 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
5101 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
5102 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
5103 TREE_TYPE (op0), op00, op01);
5104 if (tem)
5105 op0 = tem;
5108 return fold_ternary_loc (loc, subcode,
5109 gimple_expr_type (stmt), op0, op1, op2);
5112 default:
5113 gcc_unreachable ();
5117 case GIMPLE_CALL:
5119 tree fn;
5120 gcall *call_stmt = as_a <gcall *> (stmt);
5122 if (gimple_call_internal_p (stmt))
5124 enum tree_code subcode = ERROR_MARK;
5125 switch (gimple_call_internal_fn (stmt))
5127 case IFN_UBSAN_CHECK_ADD:
5128 subcode = PLUS_EXPR;
5129 break;
5130 case IFN_UBSAN_CHECK_SUB:
5131 subcode = MINUS_EXPR;
5132 break;
5133 case IFN_UBSAN_CHECK_MUL:
5134 subcode = MULT_EXPR;
5135 break;
5136 default:
5137 return NULL_TREE;
5139 tree arg0 = gimple_call_arg (stmt, 0);
5140 tree arg1 = gimple_call_arg (stmt, 1);
5141 tree op0 = (*valueize) (arg0);
5142 tree op1 = (*valueize) (arg1);
5144 if (TREE_CODE (op0) != INTEGER_CST
5145 || TREE_CODE (op1) != INTEGER_CST)
5147 switch (subcode)
5149 case MULT_EXPR:
5150 /* x * 0 = 0 * x = 0 without overflow. */
5151 if (integer_zerop (op0) || integer_zerop (op1))
5152 return build_zero_cst (TREE_TYPE (arg0));
5153 break;
5154 case MINUS_EXPR:
5155 /* y - y = 0 without overflow. */
5156 if (operand_equal_p (op0, op1, 0))
5157 return build_zero_cst (TREE_TYPE (arg0));
5158 break;
5159 default:
5160 break;
5163 tree res
5164 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5165 if (res
5166 && TREE_CODE (res) == INTEGER_CST
5167 && !TREE_OVERFLOW (res))
5168 return res;
5169 return NULL_TREE;
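/* E.g. an IFN_UBSAN_CHECK_ADD of the constants 2 and 3 folds to 5;
   if the addition would overflow, TREE_OVERFLOW is set on the result
   and we refuse to fold.  */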
5172 fn = (*valueize) (gimple_call_fn (stmt));
5173 if (TREE_CODE (fn) == ADDR_EXPR
5174 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5175 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5176 && gimple_builtin_call_types_compatible_p (stmt,
5177 TREE_OPERAND (fn, 0)))
5179 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5180 tree retval;
5181 unsigned i;
5182 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5183 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5184 retval = fold_builtin_call_array (loc,
5185 gimple_call_return_type (call_stmt),
5186 fn, gimple_call_num_args (stmt), args);
5187 if (retval)
5189 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5190 STRIP_NOPS (retval);
5191 retval = fold_convert (gimple_call_return_type (call_stmt),
5192 retval);
5194 return retval;
5196 return NULL_TREE;
5199 default:
5200 return NULL_TREE;
5204 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5205 Returns NULL_TREE if folding to a constant is not possible, otherwise
5206 returns a constant according to is_gimple_min_invariant. */
5208 tree
5209 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
5211 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5212 if (res && is_gimple_min_invariant (res))
5213 return res;
5214 return NULL_TREE;
5218 /* The following set of functions is supposed to fold references using
5219 their constant initializers. */
5221 /* See if we can find a constructor defining the value of BASE.
5222 When we know the constructor at a constant offset (such as when
5223 BASE is array[40] and we know the constructor of array), then
5224 BIT_OFFSET is adjusted accordingly.
5226 As a special case, return error_mark_node when the constructor
5227 is not explicitly available but is known to be zero,
5228 such as for 'static const int a;'. */
5229 static tree
5230 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5231 tree (*valueize)(tree))
5233 HOST_WIDE_INT bit_offset2, size, max_size;
5234 bool reverse;
5236 if (TREE_CODE (base) == MEM_REF)
5238 if (!integer_zerop (TREE_OPERAND (base, 1)))
5240 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
5241 return NULL_TREE;
5242 *bit_offset += (mem_ref_offset (base).to_short_addr ()
5243 * BITS_PER_UNIT);
5246 if (valueize
5247 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5248 base = valueize (TREE_OPERAND (base, 0));
5249 if (!base || TREE_CODE (base) != ADDR_EXPR)
5250 return NULL_TREE;
5251 base = TREE_OPERAND (base, 0);
5254 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5255 DECL_INITIAL. If BASE is a nested reference into another
5256 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5257 the inner reference. */
5258 switch (TREE_CODE (base))
5260 case VAR_DECL:
5261 case CONST_DECL:
5263 tree init = ctor_for_folding (base);
5265 /* Our semantics are the exact opposite of ctor_for_folding's;
5266 NULL means unknown, while error_mark_node means 0. */
5267 if (init == error_mark_node)
5268 return NULL_TREE;
5269 if (!init)
5270 return error_mark_node;
5271 return init;
5274 case ARRAY_REF:
5275 case COMPONENT_REF:
5276 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
5277 &reverse);
5278 if (max_size == -1 || size != max_size)
5279 return NULL_TREE;
5280 *bit_offset += bit_offset2;
5281 return get_base_constructor (base, bit_offset, valueize);
5283 case STRING_CST:
5284 case CONSTRUCTOR:
5285 return base;
5287 default:
5288 return NULL_TREE;
5292 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5293 SIZE to the memory at bit OFFSET. */
5295 static tree
5296 fold_array_ctor_reference (tree type, tree ctor,
5297 unsigned HOST_WIDE_INT offset,
5298 unsigned HOST_WIDE_INT size,
5299 tree from_decl)
5301 unsigned HOST_WIDE_INT cnt;
5302 tree cfield, cval;
5303 offset_int low_bound;
5304 offset_int elt_size;
5305 offset_int index, max_index;
5306 offset_int access_index;
5307 tree domain_type = NULL_TREE, index_type = NULL_TREE;
5308 HOST_WIDE_INT inner_offset;
5310 /* Compute low bound and elt size. */
5311 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
5312 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
5313 if (domain_type && TYPE_MIN_VALUE (domain_type))
5315 /* Static constructors for variably sized objects make no sense. */
5316 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
5317 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
5318 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
5320 else
5321 low_bound = 0;
5322 /* Static constructors for variably sized objects make no sense. */
5323 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
5324 == INTEGER_CST);
5325 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
5327 /* We can handle only constant-sized accesses that are known not to
5328 be larger than the size of the array element. */
5329 if (!TYPE_SIZE_UNIT (type)
5330 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
5331 || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
5332 || elt_size == 0)
5333 return NULL_TREE;
5335 /* Compute the array index we look for. */
5336 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
5337 elt_size);
5338 access_index += low_bound;
5339 if (index_type)
5340 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
5341 TYPE_SIGN (index_type));
5343 /* And offset within the access. */
5344 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
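/* Worked example (illustrative, assuming BITS_PER_UNIT == 8): for a
   32-bit read at bit OFFSET 96 from a constructor of 'int a[10]'
   (elt_size == 4, low_bound == 0):
   access_index = (96 / BITS_PER_UNIT) / 4 = 3 and
   inner_offset = 96 % (4 * BITS_PER_UNIT) = 0, i.e. all of a[3].  */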
5346 /* See if the array element is large enough to span the whole access. We
5347 do not care to fold accesses spanning multiple array indexes. */
5348 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
5349 return NULL_TREE;
5351 index = low_bound - 1;
5352 if (index_type)
5353 index = wi::ext (index, TYPE_PRECISION (index_type),
5354 TYPE_SIGN (index_type));
5356 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
5358 /* An array constructor might explicitly set the index, specify a range,
5359 or leave the index NULL, meaning that it is the next index after the
5360 previous one. */
5361 if (cfield)
5363 if (TREE_CODE (cfield) == INTEGER_CST)
5364 max_index = index = wi::to_offset (cfield);
5365 else
5367 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
5368 index = wi::to_offset (TREE_OPERAND (cfield, 0));
5369 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
5372 else
5374 index += 1;
5375 if (index_type)
5376 index = wi::ext (index, TYPE_PRECISION (index_type),
5377 TYPE_SIGN (index_type));
5378 max_index = index;
5381 /* Do we have a match? */
5382 if (wi::cmpu (access_index, index) >= 0
5383 && wi::cmpu (access_index, max_index) <= 0)
5384 return fold_ctor_reference (type, cval, inner_offset, size,
5385 from_decl);
5387 /* When memory is not explicitly mentioned in the constructor,
5388 it is 0 (or out of range). */
5389 return build_zero_cst (type);
5392 /* CTOR is CONSTRUCTOR of an aggregate or vector.
5393 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
5395 static tree
5396 fold_nonarray_ctor_reference (tree type, tree ctor,
5397 unsigned HOST_WIDE_INT offset,
5398 unsigned HOST_WIDE_INT size,
5399 tree from_decl)
5401 unsigned HOST_WIDE_INT cnt;
5402 tree cfield, cval;
5404 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5405 cval)
5407 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5408 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5409 tree field_size = DECL_SIZE (cfield);
5410 offset_int bitoffset;
5411 offset_int bitoffset_end, access_end;
5413 /* Variably sized objects in static constructors make no sense,
5414 but field_size can be NULL for flexible array members. */
5415 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5416 && TREE_CODE (byte_offset) == INTEGER_CST
5417 && (field_size != NULL_TREE
5418 ? TREE_CODE (field_size) == INTEGER_CST
5419 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
5421 /* Compute bit offset of the field. */
5422 bitoffset = (wi::to_offset (field_offset)
5423 + wi::lshift (wi::to_offset (byte_offset),
5424 LOG2_BITS_PER_UNIT));
5425 /* Compute bit offset where the field ends. */
5426 if (field_size != NULL_TREE)
5427 bitoffset_end = bitoffset + wi::to_offset (field_size);
5428 else
5429 bitoffset_end = 0;
5431 access_end = offset_int (offset) + size;
5433 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5434 [BITOFFSET, BITOFFSET_END)? */
5435 if (wi::cmps (access_end, bitoffset) > 0
5436 && (field_size == NULL_TREE
5437 || wi::lts_p (offset, bitoffset_end)))
5439 offset_int inner_offset = offset_int (offset) - bitoffset;
5440 /* We do have overlap. Now see if the field is large enough to
5441 cover the access. Give up for accesses spanning multiple
5442 fields. */
5443 if (wi::cmps (access_end, bitoffset_end) > 0)
5444 return NULL_TREE;
5445 if (wi::lts_p (offset, bitoffset))
5446 return NULL_TREE;
5447 return fold_ctor_reference (type, cval,
5448 inner_offset.to_uhwi (), size,
5449 from_decl);
5452 /* When memory is not explicitly mentioned in the constructor, it is 0. */
5453 return build_zero_cst (type);
5456 /* CTOR is a value initializing memory; fold a reference of type TYPE and
5457 size SIZE to the memory at bit OFFSET. */
5459 tree
5460 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
5461 unsigned HOST_WIDE_INT size, tree from_decl)
5463 tree ret;
5465 /* We found the field with an exact match. */
5466 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5467 && !offset)
5468 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5470 /* We are at the end of the walk; see if we can view-convert the
5471 result. */
5472 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5473 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
5474 && !compare_tree_int (TYPE_SIZE (type), size)
5475 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
5477 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5478 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5479 if (ret)
5480 STRIP_USELESS_TYPE_CONVERSION (ret);
5481 return ret;
5483 /* For constants and byte-aligned/sized reads try to go through
5484 native_encode/interpret. */
5485 if (CONSTANT_CLASS_P (ctor)
5486 && BITS_PER_UNIT == 8
5487 && offset % BITS_PER_UNIT == 0
5488 && size % BITS_PER_UNIT == 0
5489 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5491 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
5492 if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5493 offset / BITS_PER_UNIT) > 0)
5494 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
5496 if (TREE_CODE (ctor) == CONSTRUCTOR)
5499 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5500 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
5501 return fold_array_ctor_reference (type, ctor, offset, size,
5502 from_decl);
5503 else
5504 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5505 from_decl);
5508 return NULL_TREE;
5511 /* Return the tree representing the element referenced by T if T is an
5512 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
5513 names using VALUEIZE. Return NULL_TREE otherwise. */
5515 tree
5516 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5518 tree ctor, idx, base;
5519 HOST_WIDE_INT offset, size, max_size;
5520 tree tem;
5521 bool reverse;
5523 if (TREE_THIS_VOLATILE (t))
5524 return NULL_TREE;
5526 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
5527 return get_symbol_constant_value (t);
5529 tem = fold_read_from_constant_string (t);
5530 if (tem)
5531 return tem;
5533 switch (TREE_CODE (t))
5535 case ARRAY_REF:
5536 case ARRAY_RANGE_REF:
5537 /* Constant indexes are handled well by get_base_constructor.
5538 Only special case variable offsets.
5539 FIXME: This code can't handle nested references with variable indexes
5540 (they will be handled only by iteration of ccp). Perhaps we can bring
5541 get_ref_base_and_extent here and make it use a valueize callback. */
5542 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5543 && valueize
5544 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
5545 && TREE_CODE (idx) == INTEGER_CST)
5547 tree low_bound, unit_size;
5549 /* If the resulting bit-offset is constant, track it. */
5550 if ((low_bound = array_ref_low_bound (t),
5551 TREE_CODE (low_bound) == INTEGER_CST)
5552 && (unit_size = array_ref_element_size (t),
5553 tree_fits_uhwi_p (unit_size)))
5555 offset_int woffset
5556 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5557 TYPE_PRECISION (TREE_TYPE (idx)));
5559 if (wi::fits_shwi_p (woffset))
5561 offset = woffset.to_shwi ();
5562 /* TODO: This code seems wrong, multiply then check
5563 to see if it fits. */
5564 offset *= tree_to_uhwi (unit_size);
5565 offset *= BITS_PER_UNIT;
5567 base = TREE_OPERAND (t, 0);
5568 ctor = get_base_constructor (base, &offset, valueize);
5569 /* Empty constructor. Always fold to 0. */
5570 if (ctor == error_mark_node)
5571 return build_zero_cst (TREE_TYPE (t));
5572 /* Out-of-bounds array access. The value is undefined,
5573 but don't fold. */
5574 if (offset < 0)
5575 return NULL_TREE;
5576 /* We cannot determine the ctor. */
5577 if (!ctor)
5578 return NULL_TREE;
5579 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5580 tree_to_uhwi (unit_size)
5581 * BITS_PER_UNIT,
5582 base);
5586 /* Fallthru. */
5588 case COMPONENT_REF:
5589 case BIT_FIELD_REF:
5590 case TARGET_MEM_REF:
5591 case MEM_REF:
5592 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
5593 ctor = get_base_constructor (base, &offset, valueize);
5595 /* Empty constructor. Always fold to 0. */
5596 if (ctor == error_mark_node)
5597 return build_zero_cst (TREE_TYPE (t));
5598 /* We do not know the precise address. */
5599 if (max_size == -1 || max_size != size)
5600 return NULL_TREE;
5601 /* We cannot determine the ctor. */
5602 if (!ctor)
5603 return NULL_TREE;
5605 /* Out-of-bounds array access. The value is undefined, but don't fold. */
5606 if (offset < 0)
5607 return NULL_TREE;
5609 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5610 base);
5612 case REALPART_EXPR:
5613 case IMAGPART_EXPR:
5615 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5616 if (c && TREE_CODE (c) == COMPLEX_CST)
5617 return fold_build1_loc (EXPR_LOCATION (t),
5618 TREE_CODE (t), TREE_TYPE (t), c);
5619 break;
5622 default:
5623 break;
5626 return NULL_TREE;
5629 tree
5630 fold_const_aggregate_ref (tree t)
5632 return fold_const_aggregate_ref_1 (t, NULL);
5635 /* Look up the virtual method with index TOKEN in a virtual table V
5636 at OFFSET.
5637 Set CAN_REFER, if non-NULL, to false if the method
5638 is not referable or if the virtual table is ill-formed (such as rewritten
5639 by a non-C++-produced symbol). Otherwise just return NULL in that case. */
5641 tree
5642 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5643 tree v,
5644 unsigned HOST_WIDE_INT offset,
5645 bool *can_refer)
5647 tree vtable = v, init, fn;
5648 unsigned HOST_WIDE_INT size;
5649 unsigned HOST_WIDE_INT elt_size, access_index;
5650 tree domain_type;
5652 if (can_refer)
5653 *can_refer = true;
5655 /* First of all, double-check that we have a virtual table. */
5656 if (TREE_CODE (v) != VAR_DECL
5657 || !DECL_VIRTUAL_P (v))
5659 /* Pass down that we lost track of the target. */
5660 if (can_refer)
5661 *can_refer = false;
5662 return NULL_TREE;
5665 init = ctor_for_folding (v);
5667 /* The virtual tables should always be born with constructors
5668 and we should always assume that they are available for
5669 folding. At the moment we do not stream them in all cases,
5670 but it should never happen that the ctor seems unreachable. */
5671 gcc_assert (init);
5672 if (init == error_mark_node)
5674 gcc_assert (in_lto_p);
5675 /* Pass down that we lost track of the target. */
5676 if (can_refer)
5677 *can_refer = false;
5678 return NULL_TREE;
5680 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
5681 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
5682 offset *= BITS_PER_UNIT;
5683 offset += token * size;
5685 /* Look up the value in the constructor, which is assumed to be an array.
5686 This is equivalent to
5687 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5688 offset, size, NULL);
5689 but in constant time. We expect that the frontend produced a simple
5690 array without indexed initializers. */
5692 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5693 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5694 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5695 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5697 access_index = offset / BITS_PER_UNIT / elt_size;
5698 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5700 /* This code makes the assumption that there are no
5701 indexed fields produced by the C++ FE, so we can directly index the array. */
5702 if (access_index < CONSTRUCTOR_NELTS (init))
5704 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5705 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5706 STRIP_NOPS (fn);
5708 else
5709 fn = NULL;
5711 /* For a type-inconsistent program we may end up looking up a virtual method
5712 in a virtual table that does not contain TOKEN entries. We may overrun
5713 the virtual table and pick up a constant or an RTTI info pointer.
5714 In any case the call is undefined. */
5715 if (!fn
5716 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5717 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5718 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5719 else
5721 fn = TREE_OPERAND (fn, 0);
5723 /* When the cgraph node is missing and the function is not public, we cannot
5724 devirtualize. This can happen in WHOPR when the actual method
5725 ends up in another partition, because we found the devirtualization
5726 possibility too late. */
5727 if (!can_refer_decl_in_current_unit_p (fn, vtable))
5729 if (can_refer)
5731 *can_refer = false;
5732 return fn;
5734 return NULL_TREE;
5738 /* Make sure we create a cgraph node for functions we'll reference.
5739 They can be non-existent if the reference comes from an entry
5740 of an external vtable, for example. */
5741 cgraph_node::get_create (fn);
5743 return fn;
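/* Editorial worked example (not from the original source) of the index
   arithmetic above, assuming 64-bit function pointers, i.e. size == 64 bits
   and elt_size == 8 bytes:

     token = 3, offset = 16 (bytes)
     offset = 16 * BITS_PER_UNIT       = 128 bits
     offset += 3 * 64                  = 320 bits
     access_index = 320 / 8 / 8        = 5

   so CONSTRUCTOR_ELT (init, 5) is the candidate method pointer.  */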
5746 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5747 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5748 KNOWN_BINFO carries the binfo describing the true type of
5749 OBJ_TYPE_REF_OBJECT(REF).
5750 If CAN_REFER is non-NULL, set it to false when the method
5751 is not referable or when the virtual table is ill-formed (such as rewritten
5752 by a non-C++-produced symbol); otherwise just return NULL in that case. */
5754 tree
5755 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5756 bool *can_refer)
5758 unsigned HOST_WIDE_INT offset;
5759 tree v;
5761 v = BINFO_VTABLE (known_binfo);
5762 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
5763 if (!v)
5764 return NULL_TREE;
5766 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5768 if (can_refer)
5769 *can_refer = false;
5770 return NULL_TREE;
5772 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
5775 /* Return true iff VAL is a gimple expression that is known to be
5776 non-negative. Restricted to floating-point inputs. */
5778 bool
5779 gimple_val_nonnegative_real_p (tree val)
5781 gimple def_stmt;
5783 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
5785 /* Use existing logic for non-gimple trees. */
5786 if (tree_expr_nonnegative_p (val))
5787 return true;
5789 if (TREE_CODE (val) != SSA_NAME)
5790 return false;
5792 /* Currently we look only at the immediately defining statement
5793 to make this determination, since recursion on defining
5794 statements of operands can lead to quadratic behavior in the
5795 worst case. This is expected to catch almost all occurrences
5796 in practice. It would be possible to implement limited-depth
5797 recursion if important cases are lost. Alternatively, passes
5798 that need this information (such as the pow/powi lowering code
5799 in the cse_sincos pass) could be revised to provide it through
5800 dataflow propagation. */
5802 def_stmt = SSA_NAME_DEF_STMT (val);
5804 if (is_gimple_assign (def_stmt))
5806 tree op0, op1;
5808 /* See fold-const.c:tree_expr_nonnegative_p for additional
5809 cases that could be handled with recursion. */
5811 switch (gimple_assign_rhs_code (def_stmt))
5813 case ABS_EXPR:
5814 /* Always true for floating-point operands. */
5815 return true;
5817 case MULT_EXPR:
5818 /* True if the two operands are identical (since we are
5819 restricted to floating-point inputs). */
5820 op0 = gimple_assign_rhs1 (def_stmt);
5821 op1 = gimple_assign_rhs2 (def_stmt);
5823 if (op0 == op1
5824 || operand_equal_p (op0, op1, 0))
5825 return true;
5827 default:
5828 return false;
5831 else if (is_gimple_call (def_stmt))
5833 tree fndecl = gimple_call_fndecl (def_stmt);
5834 if (fndecl
5835 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
5837 tree arg1;
5839 switch (DECL_FUNCTION_CODE (fndecl))
5841 CASE_FLT_FN (BUILT_IN_ACOS):
5842 CASE_FLT_FN (BUILT_IN_ACOSH):
5843 CASE_FLT_FN (BUILT_IN_CABS):
5844 CASE_FLT_FN (BUILT_IN_COSH):
5845 CASE_FLT_FN (BUILT_IN_ERFC):
5846 CASE_FLT_FN (BUILT_IN_EXP):
5847 CASE_FLT_FN (BUILT_IN_EXP10):
5848 CASE_FLT_FN (BUILT_IN_EXP2):
5849 CASE_FLT_FN (BUILT_IN_FABS):
5850 CASE_FLT_FN (BUILT_IN_FDIM):
5851 CASE_FLT_FN (BUILT_IN_HYPOT):
5852 CASE_FLT_FN (BUILT_IN_POW10):
5853 return true;
5855 CASE_FLT_FN (BUILT_IN_SQRT):
5856 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
5857 negative inputs. */
5858 if (!HONOR_SIGNED_ZEROS (val))
5859 return true;
5861 break;
5863 CASE_FLT_FN (BUILT_IN_POWI):
5864 /* True if the second argument is an even integer. */
5865 arg1 = gimple_call_arg (def_stmt, 1);
5867 if (TREE_CODE (arg1) == INTEGER_CST
5868 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
5869 return true;
5871 break;
5873 CASE_FLT_FN (BUILT_IN_POW):
5874 /* True if the second argument is an even integer-valued
5875 real. */
5876 arg1 = gimple_call_arg (def_stmt, 1);
5878 if (TREE_CODE (arg1) == REAL_CST)
5880 REAL_VALUE_TYPE c;
5881 HOST_WIDE_INT n;
5883 c = TREE_REAL_CST (arg1);
5884 n = real_to_integer (&c);
5886 if ((n & 1) == 0)
5888 REAL_VALUE_TYPE cint;
5889 real_from_integer (&cint, VOIDmode, n, SIGNED);
5890 if (real_identical (&c, &cint))
5891 return true;
5895 break;
5897 default:
5898 return false;
5903 return false;
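/* Editorial sketch (not from the original source) of GIMPLE shapes the
   predicate above recognizes; SSA names are illustrative:

     _1 = x_2 * x_2;                    MULT_EXPR, identical operands
     _3 = __builtin_fabs (y_4);         listed builtin
     _5 = __builtin_pow (z_6, 2.0e+0);  even integer-valued exponent

   each of these defines a value known to be non-negative.  */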
5906 /* Given a pointer value OP0, return a simplified version of an
5907 indirection through OP0, or NULL_TREE if no simplification is
5908 possible. Note that the resulting type may differ from the
5909 pointed-to type, as long as it is still compatible
5910 from the langhooks point of view. */
5912 tree
5913 gimple_fold_indirect_ref (tree t)
5915 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
5916 tree sub = t;
5917 tree subtype;
5919 STRIP_NOPS (sub);
5920 subtype = TREE_TYPE (sub);
5921 if (!POINTER_TYPE_P (subtype))
5922 return NULL_TREE;
5924 if (TREE_CODE (sub) == ADDR_EXPR)
5926 tree op = TREE_OPERAND (sub, 0);
5927 tree optype = TREE_TYPE (op);
5928 /* *&p => p */
5929 if (useless_type_conversion_p (type, optype))
5930 return op;
5932 /* *(foo *)&fooarray => fooarray[0] */
5933 if (TREE_CODE (optype) == ARRAY_TYPE
5934 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
5935 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5937 tree type_domain = TYPE_DOMAIN (optype);
5938 tree min_val = size_zero_node;
5939 if (type_domain && TYPE_MIN_VALUE (type_domain))
5940 min_val = TYPE_MIN_VALUE (type_domain);
5941 if (TREE_CODE (min_val) == INTEGER_CST)
5942 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
5944 /* *(foo *)&complexfoo => __real__ complexfoo */
5945 else if (TREE_CODE (optype) == COMPLEX_TYPE
5946 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5947 return fold_build1 (REALPART_EXPR, type, op);
5948 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
5949 else if (TREE_CODE (optype) == VECTOR_TYPE
5950 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5952 tree part_width = TYPE_SIZE (type);
5953 tree index = bitsize_int (0);
5954 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
5958 /* *(p + CST) -> ... */
5959 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
5960 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
5962 tree addr = TREE_OPERAND (sub, 0);
5963 tree off = TREE_OPERAND (sub, 1);
5964 tree addrtype;
5966 STRIP_NOPS (addr);
5967 addrtype = TREE_TYPE (addr);
5969 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
5970 if (TREE_CODE (addr) == ADDR_EXPR
5971 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
5972 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
5973 && tree_fits_uhwi_p (off))
5975 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
5976 tree part_width = TYPE_SIZE (type);
5977 unsigned HOST_WIDE_INT part_widthi
5978 = tree_to_shwi (part_width) / BITS_PER_UNIT;
5979 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
5980 tree index = bitsize_int (indexi);
5981 if (offset / part_widthi
5982 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
5983 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
5984 part_width, index);
5987 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5988 if (TREE_CODE (addr) == ADDR_EXPR
5989 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
5990 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
5992 tree size = TYPE_SIZE_UNIT (type);
5993 if (tree_int_cst_equal (size, off))
5994 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
5997 /* *(p + CST) -> MEM_REF <p, CST>. */
5998 if (TREE_CODE (addr) != ADDR_EXPR
5999 || DECL_P (TREE_OPERAND (addr, 0)))
6000 return fold_build2 (MEM_REF, type,
6001 addr,
6002 wide_int_to_tree (ptype, off));
6005 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
6006 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
6007 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
6008 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
6010 tree type_domain;
6011 tree min_val = size_zero_node;
6012 tree osub = sub;
6013 sub = gimple_fold_indirect_ref (sub);
6014 if (! sub)
6015 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
6016 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
6017 if (type_domain && TYPE_MIN_VALUE (type_domain))
6018 min_val = TYPE_MIN_VALUE (type_domain);
6019 if (TREE_CODE (min_val) == INTEGER_CST)
6020 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
6023 return NULL_TREE;
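/* Editorial illustration (not from the original source) of the rewrites
   above, written as C-level equivalents:

     int a[4];          *(int *)&a        => a[0]
     _Complex float c;  *(float *)&c      => __real__ c
                        ((float *)&c)[1]  => __imag__ c
     v4sf v;            *(float *)&v      => BIT_FIELD_REF <v, 32, 0>  */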
6026 /* Return true if CODE is an operation that when operating on signed
6027 integer types involves undefined behavior on overflow and the
6028 operation can be expressed with unsigned arithmetic. */
6030 bool
6031 arith_code_with_undefined_signed_overflow (tree_code code)
6033 switch (code)
6035 case PLUS_EXPR:
6036 case MINUS_EXPR:
6037 case MULT_EXPR:
6038 case NEGATE_EXPR:
6039 case POINTER_PLUS_EXPR:
6040 return true;
6041 default:
6042 return false;
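/* Editorial note (not from the original source): e.g. MULT_EXPR on 'int'
   qualifies because signed multiplication overflow is undefined while the
   unsigned multiplication wraps; TRUNC_DIV_EXPR is not listed since signed
   division has no equivalent wrapping unsigned form.  */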
6046 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
6047 operation that can be transformed to unsigned arithmetic by converting
6048 its operands, carrying out the operation in the corresponding unsigned
6049 type and converting the result back to the original type.
6051 Returns a sequence of statements that replace STMT and also contain
6052 a modified form of STMT itself. */
6054 gimple_seq
6055 rewrite_to_defined_overflow (gimple stmt)
6057 if (dump_file && (dump_flags & TDF_DETAILS))
6059 fprintf (dump_file, "rewriting stmt with undefined signed "
6060 "overflow ");
6061 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
6064 tree lhs = gimple_assign_lhs (stmt);
6065 tree type = unsigned_type_for (TREE_TYPE (lhs));
6066 gimple_seq stmts = NULL;
6067 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
6069 gimple_seq stmts2 = NULL;
6070 gimple_set_op (stmt, i,
6071 force_gimple_operand (fold_convert (type,
6072 gimple_op (stmt, i)),
6073 &stmts2, true, NULL_TREE));
6074 gimple_seq_add_seq (&stmts, stmts2);
6076 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
6077 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
6078 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
6079 gimple_seq_add_stmt (&stmts, stmt);
6080 gimple cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
6081 gimple_seq_add_stmt (&stmts, cvt);
6083 return stmts;
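/* Editorial sketch (not from the original source) of the rewrite above.
   For a statement

     a_1 = b_2 + c_3;   <signed int, overflow undefined>

   the returned sequence has the shape (SSA names illustrative):

     _4 = (unsigned int) b_2;
     _5 = (unsigned int) c_3;
     _6 = _4 + _5;      <unsigned, wraps, well defined>
     a_1 = (int) _6;

   and a POINTER_PLUS_EXPR is additionally turned into a plain PLUS_EXPR
   on the unsigned type, as the code above shows.  */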
6087 /* The valueization hook we use for the gimple_build API simplification.
6088 This makes us match fold_buildN behavior by only combining with
6089 statements in the sequence(s) we are currently building. */
6091 static tree
6092 gimple_build_valueize (tree op)
6094 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6095 return op;
6096 return NULL_TREE;
6099 /* Build the expression CODE OP0 of type TYPE with location LOC,
6100 simplifying it first if possible. Returns the built
6101 expression value and appends statements possibly defining it
6102 to SEQ. */
6104 tree
6105 gimple_build (gimple_seq *seq, location_t loc,
6106 enum tree_code code, tree type, tree op0)
6108 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
6109 if (!res)
6111 if (gimple_in_ssa_p (cfun))
6112 res = make_ssa_name (type);
6113 else
6114 res = create_tmp_reg (type);
6115 gimple stmt;
6116 if (code == REALPART_EXPR
6117 || code == IMAGPART_EXPR
6118 || code == VIEW_CONVERT_EXPR)
6119 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6120 else
6121 stmt = gimple_build_assign (res, code, op0);
6122 gimple_set_location (stmt, loc);
6123 gimple_seq_add_stmt_without_update (seq, stmt);
6125 return res;
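/* Editorial usage sketch (not from the original source) for the unary
   overload above:

     gimple_seq seq = NULL;
     tree res = gimple_build (&seq, loc, NEGATE_EXPR, integer_type_node, x);
     gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

   If gimple_simplify can fold NEGATE_EXPR of X (e.g. X is itself the
   result of a negation), RES is the simplified value and SEQ may stay
   empty.  */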
6128 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6129 simplifying it first if possible. Returns the built
6130 expression value and appends statements possibly defining it
6131 to SEQ. */
6133 tree
6134 gimple_build (gimple_seq *seq, location_t loc,
6135 enum tree_code code, tree type, tree op0, tree op1)
6137 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6138 if (!res)
6140 if (gimple_in_ssa_p (cfun))
6141 res = make_ssa_name (type);
6142 else
6143 res = create_tmp_reg (type);
6144 gimple stmt = gimple_build_assign (res, code, op0, op1);
6145 gimple_set_location (stmt, loc);
6146 gimple_seq_add_stmt_without_update (seq, stmt);
6148 return res;
6151 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6152 simplifying it first if possible. Returns the built
6153 expression value and appends statements possibly defining it
6154 to SEQ. */
6156 tree
6157 gimple_build (gimple_seq *seq, location_t loc,
6158 enum tree_code code, tree type, tree op0, tree op1, tree op2)
6160 tree res = gimple_simplify (code, type, op0, op1, op2,
6161 seq, gimple_build_valueize);
6162 if (!res)
6164 if (gimple_in_ssa_p (cfun))
6165 res = make_ssa_name (type);
6166 else
6167 res = create_tmp_reg (type);
6168 gimple stmt;
6169 if (code == BIT_FIELD_REF)
6170 stmt = gimple_build_assign (res, code,
6171 build3 (code, type, op0, op1, op2));
6172 else
6173 stmt = gimple_build_assign (res, code, op0, op1, op2);
6174 gimple_set_location (stmt, loc);
6175 gimple_seq_add_stmt_without_update (seq, stmt);
6177 return res;
6180 /* Build the call FN (ARG0) with a result of type TYPE
6181 (or no result if TYPE is void) with location LOC,
6182 simplifying it first if possible. Returns the built
6183 expression value (or NULL_TREE if TYPE is void) and appends
6184 statements possibly defining it to SEQ. */
6186 tree
6187 gimple_build (gimple_seq *seq, location_t loc,
6188 enum built_in_function fn, tree type, tree arg0)
6190 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6191 if (!res)
6193 tree decl = builtin_decl_implicit (fn);
6194 gimple stmt = gimple_build_call (decl, 1, arg0);
6195 if (!VOID_TYPE_P (type))
6197 if (gimple_in_ssa_p (cfun))
6198 res = make_ssa_name (type);
6199 else
6200 res = create_tmp_reg (type);
6201 gimple_call_set_lhs (stmt, res);
6203 gimple_set_location (stmt, loc);
6204 gimple_seq_add_stmt_without_update (seq, stmt);
6206 return res;
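/* Editorial usage sketch (not from the original source) for the
   one-argument call overload above:

     tree res = gimple_build (&seq, loc, BUILT_IN_SQRT, double_type_node, x);

   emits a call to sqrt unless gimple_simplify folds it, e.g. when X is a
   suitable constant.  */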
6209 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6210 (or no result if TYPE is void) with location LOC,
6211 simplifying it first if possible. Returns the built
6212 expression value (or NULL_TREE if TYPE is void) and appends
6213 statements possibly defining it to SEQ. */
6215 tree
6216 gimple_build (gimple_seq *seq, location_t loc,
6217 enum built_in_function fn, tree type, tree arg0, tree arg1)
6219 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6220 if (!res)
6222 tree decl = builtin_decl_implicit (fn);
6223 gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
6224 if (!VOID_TYPE_P (type))
6226 if (gimple_in_ssa_p (cfun))
6227 res = make_ssa_name (type);
6228 else
6229 res = create_tmp_reg (type);
6230 gimple_call_set_lhs (stmt, res);
6232 gimple_set_location (stmt, loc);
6233 gimple_seq_add_stmt_without_update (seq, stmt);
6235 return res;
6238 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6239 (or no result if TYPE is void) with location LOC,
6240 simplifying it first if possible. Returns the built
6241 expression value (or NULL_TREE if TYPE is void) and appends
6242 statements possibly defining it to SEQ. */
6244 tree
6245 gimple_build (gimple_seq *seq, location_t loc,
6246 enum built_in_function fn, tree type,
6247 tree arg0, tree arg1, tree arg2)
6249 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6250 seq, gimple_build_valueize);
6251 if (!res)
6253 tree decl = builtin_decl_implicit (fn);
6254 gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6255 if (!VOID_TYPE_P (type))
6257 if (gimple_in_ssa_p (cfun))
6258 res = make_ssa_name (type);
6259 else
6260 res = create_tmp_reg (type);
6261 gimple_call_set_lhs (stmt, res);
6263 gimple_set_location (stmt, loc);
6264 gimple_seq_add_stmt_without_update (seq, stmt);
6266 return res;
6269 /* Build the conversion (TYPE) OP with a result of type TYPE
6270 with location LOC if such a conversion is necessary in GIMPLE,
6271 simplifying it first.
6272 Returns the built expression value and appends
6273 statements possibly defining it to SEQ. */
6275 tree
6276 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6278 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6279 return op;
6280 return gimple_build (seq, loc, NOP_EXPR, type, op);
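/* Editorial usage sketch (not from the original source): gimple_convert is
   a no-op for useless conversions and otherwise funnels through the unary
   gimple_build overload:

     tree w = gimple_convert (&seq, loc, long_integer_type_node, v);

   emits 'w = (long int) v' into SEQ only when the conversion is not
   useless.  */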