1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2015 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "hash-set.h"
26 #include "machmode.h"
27 #include "vec.h"
28 #include "double-int.h"
29 #include "input.h"
30 #include "alias.h"
31 #include "symtab.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "stringpool.h"
37 #include "hashtab.h"
38 #include "hard-reg-set.h"
39 #include "function.h"
40 #include "rtl.h"
41 #include "flags.h"
42 #include "statistics.h"
43 #include "real.h"
44 #include "fixed-value.h"
45 #include "insn-config.h"
46 #include "expmed.h"
47 #include "dojump.h"
48 #include "explow.h"
49 #include "calls.h"
50 #include "emit-rtl.h"
51 #include "varasm.h"
52 #include "stmt.h"
53 #include "expr.h"
54 #include "stor-layout.h"
55 #include "dumpfile.h"
56 #include "bitmap.h"
57 #include "predict.h"
58 #include "dominance.h"
59 #include "basic-block.h"
60 #include "tree-ssa-alias.h"
61 #include "internal-fn.h"
62 #include "gimple-fold.h"
63 #include "gimple-expr.h"
64 #include "is-a.h"
65 #include "gimple.h"
66 #include "gimplify.h"
67 #include "gimple-iterator.h"
68 #include "gimple-ssa.h"
69 #include "tree-ssanames.h"
70 #include "tree-into-ssa.h"
71 #include "tree-dfa.h"
72 #include "tree-ssa.h"
73 #include "tree-ssa-propagate.h"
74 #include "target.h"
75 #include "hash-map.h"
76 #include "plugin-api.h"
77 #include "ipa-ref.h"
78 #include "cgraph.h"
79 #include "ipa-utils.h"
80 #include "gimple-pretty-print.h"
81 #include "tree-ssa-address.h"
82 #include "langhooks.h"
83 #include "gimplify-me.h"
84 #include "dbgcnt.h"
85 #include "builtins.h"
86 #include "output.h"
87 #include "tree-eh.h"
88 #include "gimple-match.h"
89 #include "tree-phinodes.h"
90 #include "ssa-iterators.h"
92 /* Return true when DECL can be referenced from the current unit.
93 FROM_DECL (if non-null) specifies the variable from whose constructor DECL was taken.
94 We can get declarations that are not possible to reference for various
95 reasons:
97 1) When analyzing C++ virtual tables.
98 C++ virtual tables do have known constructors even
99 when they are keyed to another compilation unit.
100 Those tables can contain pointers to methods and vars
101 in other units. Those methods have both STATIC and EXTERNAL
102 set.
103 2) In WHOPR mode devirtualization might lead to a reference
104 to a method that was partitioned elsewhere.
105 In this case we have static VAR_DECL or FUNCTION_DECL
106 that has no corresponding callgraph/varpool node
107 declaring the body.
108 3) COMDAT functions referred to by external vtables that
109 we devirtualize only during the final compilation stage.
110 At this time we have already decided that we will not output
111 the function body, and thus we can't reference the symbol
112 directly. */
114 static bool
115 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
117 varpool_node *vnode;
118 struct cgraph_node *node;
119 symtab_node *snode;
121 if (DECL_ABSTRACT_P (decl))
122 return false;
124 /* We are concerned only about static/external vars and functions. */
125 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
126 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
127 return true;
129 /* Static objects can be referred to only if they were not optimized out yet. */
130 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
132 /* Before we start optimizing unreachable code we can be sure all
133 static objects are defined. */
134 if (symtab->function_flags_ready)
135 return true;
136 snode = symtab_node::get (decl);
137 if (!snode || !snode->definition)
138 return false;
139 node = dyn_cast <cgraph_node *> (snode);
140 return !node || !node->global.inlined_to;
143 /* We will later output the initializer, so we can refer to it.
144 So we are concerned only when DECL comes from the initializer of
145 an external var or a var that has been optimized out. */
146 if (!from_decl
147 || TREE_CODE (from_decl) != VAR_DECL
148 || (!DECL_EXTERNAL (from_decl)
149 && (vnode = varpool_node::get (from_decl)) != NULL
150 && vnode->definition)
151 || (flag_ltrans
152 && (vnode = varpool_node::get (from_decl)) != NULL
153 && vnode->in_other_partition))
154 return true;
155 /* We are folding a reference from an external vtable. The vtable may refer
156 to a symbol keyed to another compilation unit. The other compilation
157 unit may be in a separate DSO and the symbol may be hidden. */
158 if (DECL_VISIBILITY_SPECIFIED (decl)
159 && DECL_EXTERNAL (decl)
160 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
161 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
162 return false;
163 /* When the function is public, we can always introduce a new reference.
164 The exception is COMDAT functions, where introducing a direct
165 reference implies the need to include the function body in the current unit. */
166 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
167 return true;
168 /* We have a COMDAT. We are going to check whether we still have a definition
169 or whether the definition is going to be output in another partition.
170 Bypass this when gimplifying; all needed functions will be produced.
172 As observed in PR20991 for already optimized out comdat virtual functions,
173 it may be tempting not to give up, because the copy will be
174 output elsewhere when the corresponding vtable is output.
175 This is, however, not possible - the ABI specifies that COMDATs are output
176 in units where they are used, and when the other unit was compiled with LTO
177 it is possible that the vtable was kept public while the function itself
178 was privatized. */
179 if (!symtab->function_flags_ready)
180 return true;
182 snode = symtab_node::get (decl);
183 if (!snode
184 || ((!snode->definition || DECL_EXTERNAL (decl))
185 && (!snode->in_other_partition
186 || (!snode->forced_by_abi && !snode->force_output))))
187 return false;
188 node = dyn_cast <cgraph_node *> (snode);
189 return !node || !node->global.inlined_to;
192 /* CVAL is a value taken from the DECL_INITIAL of a variable. Try to transform
193 it into an acceptable form for is_gimple_min_invariant.
194 FROM_DECL (if non-NULL) specifies the variable whose constructor contains CVAL. */
196 tree
197 canonicalize_constructor_val (tree cval, tree from_decl)
199 tree orig_cval = cval;
200 STRIP_NOPS (cval);
201 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
202 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
204 tree ptr = TREE_OPERAND (cval, 0);
205 if (is_gimple_min_invariant (ptr))
206 cval = build1_loc (EXPR_LOCATION (cval),
207 ADDR_EXPR, TREE_TYPE (ptr),
208 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
209 ptr,
210 fold_convert (ptr_type_node,
211 TREE_OPERAND (cval, 1))));
213 if (TREE_CODE (cval) == ADDR_EXPR)
215 tree base = NULL_TREE;
216 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
218 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
219 if (base)
220 TREE_OPERAND (cval, 0) = base;
222 else
223 base = get_base_address (TREE_OPERAND (cval, 0));
224 if (!base)
225 return NULL_TREE;
227 if ((TREE_CODE (base) == VAR_DECL
228 || TREE_CODE (base) == FUNCTION_DECL)
229 && !can_refer_decl_in_current_unit_p (base, from_decl))
230 return NULL_TREE;
231 if (TREE_CODE (base) == VAR_DECL)
232 TREE_ADDRESSABLE (base) = 1;
233 else if (TREE_CODE (base) == FUNCTION_DECL)
235 /* Make sure we create a cgraph node for functions we'll reference.
236 They can be non-existent if the reference comes from an entry
237 of an external vtable for example. */
238 cgraph_node::get_create (base);
240 /* Fixup types in global initializers. */
241 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
242 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
244 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
245 cval = fold_convert (TREE_TYPE (orig_cval), cval);
246 return cval;
248 if (TREE_OVERFLOW_P (cval))
249 return drop_tree_overflow (cval);
250 return orig_cval;
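/* Illustrative example (added commentary, not part of the original source):
   given a constructor value such as (char *) &x + 4, where x is a VAR_DECL
   that can be referenced from this unit, the code above rewrites it to the
   equivalent &MEM[(char *)&x + 4], a form is_gimple_min_invariant accepts.
   If x cannot be referenced from the current unit, NULL_TREE is returned
   and the caller must keep the load from memory. */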
253 /* If SYM is a constant variable with known value, return the value.
254 NULL_TREE is returned otherwise. */
256 tree
257 get_symbol_constant_value (tree sym)
259 tree val = ctor_for_folding (sym);
260 if (val != error_mark_node)
262 if (val)
264 val = canonicalize_constructor_val (unshare_expr (val), sym);
265 if (val && is_gimple_min_invariant (val))
266 return val;
267 else
268 return NULL_TREE;
270 /* Variables declared 'const' without an initializer
271 have zero as the initializer if they may not be
272 overridden at link or run time. */
273 if (!val
274 && is_gimple_reg_type (TREE_TYPE (sym)))
275 return build_zero_cst (TREE_TYPE (sym));
278 return NULL_TREE;
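/* Illustrative example (added commentary): for
     static const int answer = 42;
   a read of 'answer' folds to the constant 42; and a 'const' variable of
   register type that has no initializer and cannot be overridden at link
   or run time folds to zero. */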
283 /* Subroutine of fold_stmt. We perform several simplifications of the
284 memory reference tree EXPR and make sure to re-gimplify them properly
285 after propagation of constant addresses. IS_LHS is true if the
286 reference is supposed to be an lvalue. */
288 static tree
289 maybe_fold_reference (tree expr, bool is_lhs)
291 tree result;
293 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
294 || TREE_CODE (expr) == REALPART_EXPR
295 || TREE_CODE (expr) == IMAGPART_EXPR)
296 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
297 return fold_unary_loc (EXPR_LOCATION (expr),
298 TREE_CODE (expr),
299 TREE_TYPE (expr),
300 TREE_OPERAND (expr, 0));
301 else if (TREE_CODE (expr) == BIT_FIELD_REF
302 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
303 return fold_ternary_loc (EXPR_LOCATION (expr),
304 TREE_CODE (expr),
305 TREE_TYPE (expr),
306 TREE_OPERAND (expr, 0),
307 TREE_OPERAND (expr, 1),
308 TREE_OPERAND (expr, 2));
310 if (!is_lhs
311 && (result = fold_const_aggregate_ref (expr))
312 && is_gimple_min_invariant (result))
313 return result;
315 return NULL_TREE;
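/* Illustrative example (added commentary): a REALPART_EXPR whose operand
   is the complex constant __complex__ (1.0, 2.0) folds to 1.0 via
   fold_unary_loc, and a non-lvalue read such as a[2] from
     static const int a[] = { 1, 2, 3 };
   is folded by fold_const_aggregate_ref to the constant 3. */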
319 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
320 replacement rhs for the statement or NULL_TREE if no simplification
321 could be made. It is assumed that the operands have been previously
322 folded. */
324 static tree
325 fold_gimple_assign (gimple_stmt_iterator *si)
327 gimple stmt = gsi_stmt (*si);
328 enum tree_code subcode = gimple_assign_rhs_code (stmt);
329 location_t loc = gimple_location (stmt);
331 tree result = NULL_TREE;
333 switch (get_gimple_rhs_class (subcode))
335 case GIMPLE_SINGLE_RHS:
337 tree rhs = gimple_assign_rhs1 (stmt);
339 if (TREE_CLOBBER_P (rhs))
340 return NULL_TREE;
342 if (REFERENCE_CLASS_P (rhs))
343 return maybe_fold_reference (rhs, false);
345 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
347 tree val = OBJ_TYPE_REF_EXPR (rhs);
348 if (is_gimple_min_invariant (val))
349 return val;
350 else if (flag_devirtualize && virtual_method_call_p (rhs))
352 bool final;
353 vec <cgraph_node *>targets
354 = possible_polymorphic_call_targets (rhs, stmt, &final);
355 if (final && targets.length () <= 1 && dbg_cnt (devirt))
357 if (dump_enabled_p ())
359 location_t loc = gimple_location_safe (stmt);
360 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
361 "resolving virtual function address "
362 "reference to function %s\n",
363 targets.length () == 1
364 ? targets[0]->name ()
365 : "NULL");
367 if (targets.length () == 1)
369 val = fold_convert (TREE_TYPE (val),
370 build_fold_addr_expr_loc
371 (loc, targets[0]->decl));
372 STRIP_USELESS_TYPE_CONVERSION (val);
374 else
375 /* We cannot use __builtin_unreachable here because it
376 cannot have its address taken. */
377 val = build_int_cst (TREE_TYPE (val), 0);
378 return val;
383 else if (TREE_CODE (rhs) == ADDR_EXPR)
385 tree ref = TREE_OPERAND (rhs, 0);
386 tree tem = maybe_fold_reference (ref, true);
387 if (tem
388 && TREE_CODE (tem) == MEM_REF
389 && integer_zerop (TREE_OPERAND (tem, 1)))
390 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
391 else if (tem)
392 result = fold_convert (TREE_TYPE (rhs),
393 build_fold_addr_expr_loc (loc, tem));
394 else if (TREE_CODE (ref) == MEM_REF
395 && integer_zerop (TREE_OPERAND (ref, 1)))
396 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
399 else if (TREE_CODE (rhs) == CONSTRUCTOR
400 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
401 && (CONSTRUCTOR_NELTS (rhs)
402 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
404 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
405 unsigned i;
406 tree val;
408 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
409 if (TREE_CODE (val) != INTEGER_CST
410 && TREE_CODE (val) != REAL_CST
411 && TREE_CODE (val) != FIXED_CST)
412 return NULL_TREE;
414 return build_vector_from_ctor (TREE_TYPE (rhs),
415 CONSTRUCTOR_ELTS (rhs));
418 else if (DECL_P (rhs))
419 return get_symbol_constant_value (rhs);
421 /* If we couldn't fold the RHS, hand over to the generic
422 fold routines. */
423 if (result == NULL_TREE)
424 result = fold (rhs);
426 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
427 that may have been added by fold, and "useless" type
428 conversions that might now be apparent due to propagation. */
429 STRIP_USELESS_TYPE_CONVERSION (result);
431 if (result != rhs && valid_gimple_rhs_p (result))
432 return result;
434 return NULL_TREE;
436 break;
438 case GIMPLE_UNARY_RHS:
439 break;
441 case GIMPLE_BINARY_RHS:
442 /* Try to canonicalize for boolean-typed X the comparisons
443 X == 0, X == 1, X != 0, and X != 1. */
444 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
445 || gimple_assign_rhs_code (stmt) == NE_EXPR)
447 tree lhs = gimple_assign_lhs (stmt);
448 tree op1 = gimple_assign_rhs1 (stmt);
449 tree op2 = gimple_assign_rhs2 (stmt);
450 tree type = TREE_TYPE (op1);
452 /* Check whether the comparison operands are of the same boolean
453 type as the result type.
454 Check that the second operand is an integer constant with value
455 one or zero. */
456 if (TREE_CODE (op2) == INTEGER_CST
457 && (integer_zerop (op2) || integer_onep (op2))
458 && useless_type_conversion_p (TREE_TYPE (lhs), type))
460 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
461 bool is_logical_not = false;
463 /* X == 0 and X != 1 is a logical-not of X;
464 X == 1 and X != 0 is X itself. */
465 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
466 || (cmp_code == NE_EXPR && integer_onep (op2)))
467 is_logical_not = true;
469 if (is_logical_not == false)
470 result = op1;
471 /* Only for X of a one-bit precision type is the transformation
472 !X -> ~X valid. */
473 else if (TYPE_PRECISION (type) == 1)
474 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
475 type, op1);
476 /* Otherwise we use !X -> X ^ 1. */
477 else
478 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
479 type, op1, build_int_cst (type, 1));
484 if (result)
486 STRIP_USELESS_TYPE_CONVERSION (result);
487 if (valid_gimple_rhs_p (result))
488 return result;
490 break;
492 case GIMPLE_TERNARY_RHS:
493 break;
495 case GIMPLE_INVALID_RHS:
496 gcc_unreachable ();
499 return NULL_TREE;
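/* Illustrative example (added commentary): for boolean-typed lhs and op1,
   the GIMPLE_BINARY_RHS case above canonicalizes
     lhs = op1 != 0;  ->  lhs = op1;
     lhs = op1 == 0;  ->  lhs = ~op1;      (one-bit precision types)
     lhs = op1 == 0;  ->  lhs = op1 ^ 1;   (wider boolean types)
   exposing the logical negation to later bitwise simplifications. */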
502 /* Attempt to fold a conditional statement. Return true if any changes were
503 made. We only attempt to fold the condition expression, and do not perform
504 any transformation that would require alteration of the cfg. It is
505 assumed that the operands have been previously folded. */
507 static bool
508 fold_gimple_cond (gcond *stmt)
510 tree result = fold_binary_loc (gimple_location (stmt),
511 gimple_cond_code (stmt),
512 boolean_type_node,
513 gimple_cond_lhs (stmt),
514 gimple_cond_rhs (stmt));
516 if (result)
518 STRIP_USELESS_TYPE_CONVERSION (result);
519 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
521 gimple_cond_set_condition_from_tree (stmt, result);
522 return true;
526 return false;
530 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
531 adjusting the replacement stmts' locations and virtual operands.
532 If the statement has an lhs, the last stmt in the sequence is expected
533 to assign to that lhs. */
535 static void
536 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
538 gimple stmt = gsi_stmt (*si_p);
540 if (gimple_has_location (stmt))
541 annotate_all_with_location (stmts, gimple_location (stmt));
543 /* First iterate over the replacement statements backward, assigning
544 virtual operands to their defining statements. */
545 gimple laststore = NULL;
546 for (gimple_stmt_iterator i = gsi_last (stmts);
547 !gsi_end_p (i); gsi_prev (&i))
549 gimple new_stmt = gsi_stmt (i);
550 if ((gimple_assign_single_p (new_stmt)
551 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
552 || (is_gimple_call (new_stmt)
553 && (gimple_call_flags (new_stmt)
554 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
556 tree vdef;
557 if (!laststore)
558 vdef = gimple_vdef (stmt);
559 else
560 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
561 gimple_set_vdef (new_stmt, vdef);
562 if (vdef && TREE_CODE (vdef) == SSA_NAME)
563 SSA_NAME_DEF_STMT (vdef) = new_stmt;
564 laststore = new_stmt;
568 /* Second iterate over the statements forward, assigning virtual
569 operands to their uses. */
570 tree reaching_vuse = gimple_vuse (stmt);
571 for (gimple_stmt_iterator i = gsi_start (stmts);
572 !gsi_end_p (i); gsi_next (&i))
574 gimple new_stmt = gsi_stmt (i);
575 /* If the new statement possibly has a VUSE, update it with the exact
576 SSA name we know will reach this one. */
577 if (gimple_has_mem_ops (new_stmt))
578 gimple_set_vuse (new_stmt, reaching_vuse);
579 gimple_set_modified (new_stmt, true);
580 if (gimple_vdef (new_stmt))
581 reaching_vuse = gimple_vdef (new_stmt);
584 /* If the new sequence does not do a store, release the virtual
585 definition of the original statement. */
586 if (reaching_vuse
587 && reaching_vuse == gimple_vuse (stmt))
589 tree vdef = gimple_vdef (stmt);
590 if (vdef
591 && TREE_CODE (vdef) == SSA_NAME)
593 unlink_stmt_vdef (stmt);
594 release_ssa_name (vdef);
598 /* Finally replace the original statement with the sequence. */
599 gsi_replace_with_seq (si_p, stmts, false);
602 /* Convert EXPR into a GIMPLE value suitable for substitution on the
603 RHS of an assignment. Insert the necessary statements before
604 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL,
605 is replaced. If the call is expected to produce a result, then it
606 is replaced by an assignment of the new RHS to the result variable.
607 If the result is to be ignored, then the call is replaced by a
608 GIMPLE_NOP. A proper VDEF chain is retained by making the first
609 VUSE and the last VDEF of the whole sequence be the same as the replaced
610 statement and using new SSA names for stores in between. */
612 void
613 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
615 tree lhs;
616 gimple stmt, new_stmt;
617 gimple_stmt_iterator i;
618 gimple_seq stmts = NULL;
620 stmt = gsi_stmt (*si_p);
622 gcc_assert (is_gimple_call (stmt));
624 push_gimplify_context (gimple_in_ssa_p (cfun));
626 lhs = gimple_call_lhs (stmt);
627 if (lhs == NULL_TREE)
629 gimplify_and_add (expr, &stmts);
630 /* We can end up folding a memcpy of an empty class assignment,
631 which gets optimized away by C++ gimplification. */
632 if (gimple_seq_empty_p (stmts))
634 pop_gimplify_context (NULL);
635 if (gimple_in_ssa_p (cfun))
637 unlink_stmt_vdef (stmt);
638 release_defs (stmt);
640 gsi_replace (si_p, gimple_build_nop (), true);
641 return;
644 else
646 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
647 new_stmt = gimple_build_assign (lhs, tmp);
648 i = gsi_last (stmts);
649 gsi_insert_after_without_update (&i, new_stmt,
650 GSI_CONTINUE_LINKING);
653 pop_gimplify_context (NULL);
655 gsi_replace_with_seq_vops (si_p, stmts);
659 /* Replace the call at *GSI with the gimple value VAL. */
661 static void
662 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
664 gimple stmt = gsi_stmt (*gsi);
665 tree lhs = gimple_call_lhs (stmt);
666 gimple repl;
667 if (lhs)
669 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
670 val = fold_convert (TREE_TYPE (lhs), val);
671 repl = gimple_build_assign (lhs, val);
673 else
674 repl = gimple_build_nop ();
675 tree vdef = gimple_vdef (stmt);
676 if (vdef && TREE_CODE (vdef) == SSA_NAME)
678 unlink_stmt_vdef (stmt);
679 release_ssa_name (vdef);
681 gsi_replace (gsi, repl, true);
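/* Illustrative usage (added commentary): a caller that has proven
   strlen ("") to be zero can call
     replace_call_with_value (gsi, integer_zero_node);
   which turns 'lhs = strlen ("")' into 'lhs = 0' (converting the value to
   the lhs type if needed) and drops the call's virtual definition, or
   replaces the call with a GIMPLE_NOP when there is no lhs. */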
684 /* Replace the call at *GSI with the new call REPL and fold that
685 again. */
687 static void
688 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
690 gimple stmt = gsi_stmt (*gsi);
691 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
692 gimple_set_location (repl, gimple_location (stmt));
693 if (gimple_vdef (stmt)
694 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
696 gimple_set_vdef (repl, gimple_vdef (stmt));
697 gimple_set_vuse (repl, gimple_vuse (stmt));
698 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
700 gsi_replace (gsi, repl, true);
701 fold_stmt (gsi);
704 /* Return true if VAR is a VAR_DECL or a component thereof. */
706 static bool
707 var_decl_component_p (tree var)
709 tree inner = var;
710 while (handled_component_p (inner))
711 inner = TREE_OPERAND (inner, 0);
712 return SSA_VAR_P (inner);
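/* Illustrative example (added commentary): for a reference such as
   a.b[i].c, the loop above strips the handled components down to the
   base object 'a', so the predicate holds exactly when the base is an
   SSA variable (a VAR_DECL, PARM_DECL, RESULT_DECL, or SSA_NAME). */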
715 /* Fold a function call to builtin mem{{,p}cpy,move}. Return
716 false if no simplification can be made.
717 If ENDP is 0, return DEST (like memcpy).
718 If ENDP is 1, return DEST+LEN (like mempcpy).
719 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
720 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
721 (memmove). */
723 static bool
724 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
725 tree dest, tree src, int endp)
727 gimple stmt = gsi_stmt (*gsi);
728 tree lhs = gimple_call_lhs (stmt);
729 tree len = gimple_call_arg (stmt, 2);
730 tree destvar, srcvar;
731 location_t loc = gimple_location (stmt);
733 /* If the LEN parameter is zero, return DEST. */
734 if (integer_zerop (len))
736 gimple repl;
737 if (gimple_call_lhs (stmt))
738 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
739 else
740 repl = gimple_build_nop ();
741 tree vdef = gimple_vdef (stmt);
742 if (vdef && TREE_CODE (vdef) == SSA_NAME)
744 unlink_stmt_vdef (stmt);
745 release_ssa_name (vdef);
747 gsi_replace (gsi, repl, true);
748 return true;
751 /* If SRC and DEST are the same (and not volatile), return
752 DEST{,+LEN,+LEN-1}. */
753 if (operand_equal_p (src, dest, 0))
755 unlink_stmt_vdef (stmt);
756 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
757 release_ssa_name (gimple_vdef (stmt));
758 if (!lhs)
760 gsi_replace (gsi, gimple_build_nop (), true);
761 return true;
763 goto done;
765 else
767 tree srctype, desttype;
768 unsigned int src_align, dest_align;
769 tree off0;
771 /* Build accesses at offset zero with a ref-all character type. */
772 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
773 ptr_mode, true), 0);
775 /* If we can perform the copy efficiently by first doing all loads
776 and then all stores, inline it that way. Currently 'efficiently'
777 means that we can load all the memory into a single integer
778 register, which is what MOVE_MAX gives us. */
779 src_align = get_pointer_alignment (src);
780 dest_align = get_pointer_alignment (dest);
781 if (tree_fits_uhwi_p (len)
782 && compare_tree_int (len, MOVE_MAX) <= 0
783 /* ??? Don't transform copies from strings with known length; this
784 confuses tree-ssa-strlen.c. This doesn't handle
785 the case in gcc.dg/strlenopt-8.c, which is XFAILed for that
786 reason. */
787 && !c_strlen (src, 2))
789 unsigned ilen = tree_to_uhwi (len);
790 if (exact_log2 (ilen) != -1)
792 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
793 if (type
794 && TYPE_MODE (type) != BLKmode
795 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
796 == ilen * 8)
797 /* If the destination pointer is not aligned we must be able
798 to emit an unaligned store. */
799 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
800 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
802 tree srctype = type;
803 tree desttype = type;
804 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
805 srctype = build_aligned_type (type, src_align);
806 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
807 tree tem = fold_const_aggregate_ref (srcmem);
808 if (tem)
809 srcmem = tem;
810 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
811 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
812 src_align))
813 srcmem = NULL_TREE;
814 if (srcmem)
816 gimple new_stmt;
817 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
819 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
820 if (gimple_in_ssa_p (cfun))
821 srcmem = make_ssa_name (TREE_TYPE (srcmem),
822 new_stmt);
823 else
824 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
825 gimple_assign_set_lhs (new_stmt, srcmem);
826 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
827 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
829 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
830 desttype = build_aligned_type (type, dest_align);
831 new_stmt
832 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
833 dest, off0),
834 srcmem);
835 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
836 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
837 if (gimple_vdef (new_stmt)
838 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
839 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
840 if (!lhs)
842 gsi_replace (gsi, new_stmt, true);
843 return true;
845 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
846 goto done;
852 if (endp == 3)
854 /* Both DEST and SRC must be pointer types.
855 ??? This is what old code did. Is the testing for pointer types
856 really mandatory?
858 If either SRC is readonly or length is 1, we can use memcpy. */
859 if (!dest_align || !src_align)
860 return false;
861 if (readonly_data_expr (src)
862 || (tree_fits_uhwi_p (len)
863 && (MIN (src_align, dest_align) / BITS_PER_UNIT
864 >= tree_to_uhwi (len))))
866 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
867 if (!fn)
868 return false;
869 gimple_call_set_fndecl (stmt, fn);
870 gimple_call_set_arg (stmt, 0, dest);
871 gimple_call_set_arg (stmt, 1, src);
872 fold_stmt (gsi);
873 return true;
876 /* If *src and *dest can't overlap, optimize into memcpy as well. */
877 if (TREE_CODE (src) == ADDR_EXPR
878 && TREE_CODE (dest) == ADDR_EXPR)
880 tree src_base, dest_base, fn;
881 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
882 HOST_WIDE_INT size = -1;
883 HOST_WIDE_INT maxsize = -1;
885 srcvar = TREE_OPERAND (src, 0);
886 src_base = get_ref_base_and_extent (srcvar, &src_offset,
887 &size, &maxsize);
888 destvar = TREE_OPERAND (dest, 0);
889 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
890 &size, &maxsize);
891 if (tree_fits_uhwi_p (len))
892 maxsize = tree_to_uhwi (len);
893 else
894 maxsize = -1;
895 src_offset /= BITS_PER_UNIT;
896 dest_offset /= BITS_PER_UNIT;
897 if (SSA_VAR_P (src_base)
898 && SSA_VAR_P (dest_base))
900 if (operand_equal_p (src_base, dest_base, 0)
901 && ranges_overlap_p (src_offset, maxsize,
902 dest_offset, maxsize))
903 return false;
905 else if (TREE_CODE (src_base) == MEM_REF
906 && TREE_CODE (dest_base) == MEM_REF)
908 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
909 TREE_OPERAND (dest_base, 0), 0))
910 return false;
911 offset_int off = mem_ref_offset (src_base) + src_offset;
912 if (!wi::fits_shwi_p (off))
913 return false;
914 src_offset = off.to_shwi ();
916 off = mem_ref_offset (dest_base) + dest_offset;
917 if (!wi::fits_shwi_p (off))
918 return false;
919 dest_offset = off.to_shwi ();
920 if (ranges_overlap_p (src_offset, maxsize,
921 dest_offset, maxsize))
922 return false;
924 else
925 return false;
927 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
928 if (!fn)
929 return false;
930 gimple_call_set_fndecl (stmt, fn);
931 gimple_call_set_arg (stmt, 0, dest);
932 gimple_call_set_arg (stmt, 1, src);
933 fold_stmt (gsi);
934 return true;
937 /* If the destination and source do not alias, optimize into
938 memcpy as well. */
939 if ((is_gimple_min_invariant (dest)
940 || TREE_CODE (dest) == SSA_NAME)
941 && (is_gimple_min_invariant (src)
942 || TREE_CODE (src) == SSA_NAME))
944 ao_ref destr, srcr;
945 ao_ref_init_from_ptr_and_size (&destr, dest, len);
946 ao_ref_init_from_ptr_and_size (&srcr, src, len);
947 if (!refs_may_alias_p_1 (&destr, &srcr, false))
949 tree fn;
950 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
951 if (!fn)
952 return false;
953 gimple_call_set_fndecl (stmt, fn);
954 gimple_call_set_arg (stmt, 0, dest);
955 gimple_call_set_arg (stmt, 1, src);
956 fold_stmt (gsi);
957 return true;
961 return false;
964 if (!tree_fits_shwi_p (len))
965 return false;
966 /* FIXME:
967 This logic loses for arguments like (type *)malloc (sizeof (type)),
968 since we strip the casts down to the VOID return value of malloc.
969 Perhaps we ought to inherit the type from the non-VOID argument here? */
970 STRIP_NOPS (src);
971 STRIP_NOPS (dest);
972 if (!POINTER_TYPE_P (TREE_TYPE (src))
973 || !POINTER_TYPE_P (TREE_TYPE (dest)))
974 return false;
975 /* In the following, try to find a type that is most natural to be
976 used for the memcpy source and destination and that allows
977 the most optimization when memcpy is turned into a plain assignment
978 using that type. In theory we could always use a char[len] type,
979 but that only gains us that the destination and source possibly
980 will no longer have their address taken. */
981 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
982 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
984 tree tem = TREE_OPERAND (src, 0);
985 STRIP_NOPS (tem);
986 if (tem != TREE_OPERAND (src, 0))
987 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
989 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
991 tree tem = TREE_OPERAND (dest, 0);
992 STRIP_NOPS (tem);
993 if (tem != TREE_OPERAND (dest, 0))
994 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
996 srctype = TREE_TYPE (TREE_TYPE (src));
997 if (TREE_CODE (srctype) == ARRAY_TYPE
998 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1000 srctype = TREE_TYPE (srctype);
1001 STRIP_NOPS (src);
1002 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
1004 desttype = TREE_TYPE (TREE_TYPE (dest));
1005 if (TREE_CODE (desttype) == ARRAY_TYPE
1006 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1008 desttype = TREE_TYPE (desttype);
1009 STRIP_NOPS (dest);
1010 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
1012 if (TREE_ADDRESSABLE (srctype)
1013 || TREE_ADDRESSABLE (desttype))
1014 return false;
1016 /* Make sure we are not copying using a floating-point mode or
1017 a type whose size possibly does not match its precision. */
1018 if (FLOAT_MODE_P (TYPE_MODE (desttype))
1019 || TREE_CODE (desttype) == BOOLEAN_TYPE
1020 || TREE_CODE (desttype) == ENUMERAL_TYPE)
1021 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
1022 if (FLOAT_MODE_P (TYPE_MODE (srctype))
1023 || TREE_CODE (srctype) == BOOLEAN_TYPE
1024 || TREE_CODE (srctype) == ENUMERAL_TYPE)
1025 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
1026 if (!srctype)
1027 srctype = desttype;
1028 if (!desttype)
1029 desttype = srctype;
1030 if (!srctype)
1031 return false;
1033 src_align = get_pointer_alignment (src);
1034 dest_align = get_pointer_alignment (dest);
1035 if (dest_align < TYPE_ALIGN (desttype)
1036 || src_align < TYPE_ALIGN (srctype))
1037 return false;
1039 destvar = dest;
1040 STRIP_NOPS (destvar);
1041 if (TREE_CODE (destvar) == ADDR_EXPR
1042 && var_decl_component_p (TREE_OPERAND (destvar, 0))
1043 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1044 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
1045 else
1046 destvar = NULL_TREE;
1048 srcvar = src;
1049 STRIP_NOPS (srcvar);
1050 if (TREE_CODE (srcvar) == ADDR_EXPR
1051 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1052 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1054 if (!destvar
1055 || src_align >= TYPE_ALIGN (desttype))
1056 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1057 srcvar, off0);
1058 else if (!STRICT_ALIGNMENT)
1060 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1061 src_align);
1062 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1064 else
1065 srcvar = NULL_TREE;
1067 else
1068 srcvar = NULL_TREE;
1070 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1071 return false;
1073 if (srcvar == NULL_TREE)
1075 STRIP_NOPS (src);
1076 if (src_align >= TYPE_ALIGN (desttype))
1077 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1078 else
1080 if (STRICT_ALIGNMENT)
1081 return false;
1082 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1083 src_align);
1084 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1087 else if (destvar == NULL_TREE)
1089 STRIP_NOPS (dest);
1090 if (dest_align >= TYPE_ALIGN (srctype))
1091 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1092 else
1094 if (STRICT_ALIGNMENT)
1095 return false;
1096 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1097 dest_align);
1098 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1102 gimple new_stmt;
1103 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1105 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1106 if (gimple_in_ssa_p (cfun))
1107 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1108 else
1109 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
1110 gimple_assign_set_lhs (new_stmt, srcvar);
1111 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1112 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1114 new_stmt = gimple_build_assign (destvar, srcvar);
1115 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1116 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1117 if (gimple_vdef (new_stmt)
1118 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1119 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1120 if (!lhs)
1122 gsi_replace (gsi, new_stmt, true);
1123 return true;
1125 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1128 done:
1129 if (endp == 0 || endp == 3)
1130 len = NULL_TREE;
1131 else if (endp == 2)
1132 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
1133 ssize_int (1));
1134 if (endp == 2 || endp == 1)
1135 dest = fold_build_pointer_plus_loc (loc, dest, len);
1137 dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
1138 GSI_SAME_STMT);
1139 gimple repl = gimple_build_assign (lhs, dest);
1140 gsi_replace (gsi, repl, true);
1141 return true;
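/* Illustrative example (added commentary): with int a, b; a call
     memcpy (&a, &b, sizeof (int));
   is inlined by the code above into a single word-sized load/store pair
   (roughly _1 = b; a = _1;) instead of a library call, since the length
   fits in one integer register (MOVE_MAX) and both accesses are
   sufficiently aligned. */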
1144 /* Fold a function call to builtin memset or bzero at *GSI, setting the
1145 memory of size LEN to the value C. Return whether a simplification was made. */
1147 static bool
1148 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1150 gimple stmt = gsi_stmt (*gsi);
1151 tree etype;
1152 unsigned HOST_WIDE_INT length, cval;
1154 /* If the LEN parameter is zero, return DEST. */
1155 if (integer_zerop (len))
1157 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1158 return true;
1161 if (! tree_fits_uhwi_p (len))
1162 return false;
1164 if (TREE_CODE (c) != INTEGER_CST)
1165 return false;
1167 tree dest = gimple_call_arg (stmt, 0);
1168 tree var = dest;
1169 if (TREE_CODE (var) != ADDR_EXPR)
1170 return false;
1172 var = TREE_OPERAND (var, 0);
1173 if (TREE_THIS_VOLATILE (var))
1174 return false;
1176 etype = TREE_TYPE (var);
1177 if (TREE_CODE (etype) == ARRAY_TYPE)
1178 etype = TREE_TYPE (etype);
1180 if (!INTEGRAL_TYPE_P (etype)
1181 && !POINTER_TYPE_P (etype))
1182 return false;
1184 if (! var_decl_component_p (var))
1185 return false;
1187 length = tree_to_uhwi (len);
1188 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1189 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1190 return false;
1192 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1193 return false;
1195 if (integer_zerop (c))
1196 cval = 0;
1197 else
1199 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1200 return false;
1202 cval = TREE_INT_CST_LOW (c);
1203 cval &= 0xff;
1204 cval |= cval << 8;
1205 cval |= cval << 16;
1206 cval |= (cval << 31) << 1;
1209 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1210 gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1211 gimple_set_vuse (store, gimple_vuse (stmt));
1212 tree vdef = gimple_vdef (stmt);
1213 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1215 gimple_set_vdef (store, gimple_vdef (stmt));
1216 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1218 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1219 if (gimple_call_lhs (stmt))
1221 gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1222 gsi_replace (gsi, asgn, true);
1224 else
1226 gimple_stmt_iterator gsi2 = *gsi;
1227 gsi_prev (gsi);
1228 gsi_remove (&gsi2, true);
1231 return true;
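/* Illustrative example (added commentary): for int x; the call
     memset (&x, 1, sizeof (int));
   is folded by replicating the byte value through the word
   (0x01 -> 0x01010101 via the shift-and-or sequence above) and
   emitting the single store
     x = 0x01010101;  */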
1235 /* Return the string length, maximum string length, or maximum value of
1236 ARG in *LENGTH.
1237 If ARG is an SSA name variable, follow its use-def chains. If *LENGTH
1238 is not NULL and, for TYPE == 0, its value is not equal to the length
1239 we determine, or if we are unable to determine the length or value,
1240 return false. VISITED is a bitmap of visited variables.
1241 TYPE is 0 if the string length should be returned, 1 for the maximum
1242 string length, and 2 for the maximum value ARG can have. */
1244 static bool
1245 get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
1247 tree var, val;
1248 gimple def_stmt;
1250 if (TREE_CODE (arg) != SSA_NAME)
1252 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1253 if (TREE_CODE (arg) == ADDR_EXPR
1254 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1255 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1257 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1258 if (TREE_CODE (aop0) == INDIRECT_REF
1259 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1260 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1261 length, visited, type);
1264 if (type == 2)
1266 val = arg;
1267 if (TREE_CODE (val) != INTEGER_CST
1268 || tree_int_cst_sgn (val) < 0)
1269 return false;
1271 else
1272 val = c_strlen (arg, 1);
1273 if (!val)
1274 return false;
1276 if (*length)
1278 if (type > 0)
1280 if (TREE_CODE (*length) != INTEGER_CST
1281 || TREE_CODE (val) != INTEGER_CST)
1282 return false;
1284 if (tree_int_cst_lt (*length, val))
1285 *length = val;
1286 return true;
1288 else if (simple_cst_equal (val, *length) != 1)
1289 return false;
1292 *length = val;
1293 return true;
1296 /* If ARG is registered for SSA update we cannot look at its defining
1297 statement. */
1298 if (name_registered_for_update_p (arg))
1299 return false;
1301 /* If we were already here, break the infinite cycle. */
1302 if (!*visited)
1303 *visited = BITMAP_ALLOC (NULL);
1304 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1305 return true;
1307 var = arg;
1308 def_stmt = SSA_NAME_DEF_STMT (var);
1310 switch (gimple_code (def_stmt))
1312 case GIMPLE_ASSIGN:
1313 /* The RHS of the statement defining VAR must either have a
1314 constant length or come from another SSA_NAME with a constant
1315 length. */
1316 if (gimple_assign_single_p (def_stmt)
1317 || gimple_assign_unary_nop_p (def_stmt))
1319 tree rhs = gimple_assign_rhs1 (def_stmt);
1320 return get_maxval_strlen (rhs, length, visited, type);
1322 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1324 tree op2 = gimple_assign_rhs2 (def_stmt);
1325 tree op3 = gimple_assign_rhs3 (def_stmt);
1326 return get_maxval_strlen (op2, length, visited, type)
1327 && get_maxval_strlen (op3, length, visited, type);
1329 return false;
1331 case GIMPLE_PHI:
1333 /* All the arguments of the PHI node must have the same constant
1334 length. */
1335 unsigned i;
1337 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1339 tree arg = gimple_phi_arg (def_stmt, i)->def;
1341 /* If this PHI has itself as an argument, we cannot
1342 determine the string length of this argument. However,
1343 if we can find a constant string length for the other
1344 PHI args then we can still be sure that this is a
1345 constant string length. So be optimistic and just
1346 continue with the next argument. */
1347 if (arg == gimple_phi_result (def_stmt))
1348 continue;
1350 if (!get_maxval_strlen (arg, length, visited, type))
1351 return false;
1354 return true;
1356 default:
1357 return false;
1361 tree
1362 get_maxval_strlen (tree arg, int type)
1364 bitmap visited = NULL;
1365 tree len = NULL_TREE;
1366 if (!get_maxval_strlen (arg, &len, &visited, type))
1367 len = NULL_TREE;
1368 if (visited)
1369 BITMAP_FREE (visited);
1371 return len;
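/* Illustrative example (added commentary): for
     s = cond ? "foo" : "barbaz";
   the SSA definition of s is a PHI of the two literals, so
   get_maxval_strlen (s, 1) walks both PHI arguments and yields the
   maximum string length, a constant 6, while type 0 would fail because
   the two lengths differ. */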
1375 /* Fold a function call to builtin strcpy with arguments DEST and SRC.
1376 Return false if no simplification can be made. */
1379 static bool
1380 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1381 tree dest, tree src)
1383 location_t loc = gimple_location (gsi_stmt (*gsi));
1384 tree fn;
1386 /* If SRC and DEST are the same (and not volatile), return DEST. */
1387 if (operand_equal_p (src, dest, 0))
1389 replace_call_with_value (gsi, dest);
1390 return true;
1393 if (optimize_function_for_size_p (cfun))
1394 return false;
1396 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1397 if (!fn)
1398 return false;
1400 tree len = get_maxval_strlen (src, 0);
1401 if (!len)
1402 return false;
1404 len = fold_convert_loc (loc, size_type_node, len);
1405 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1406 len = force_gimple_operand_gsi (gsi, len, true,
1407 NULL_TREE, true, GSI_SAME_STMT);
1408 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1409 replace_call_with_call_and_fold (gsi, repl);
1410 return true;
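/* Illustrative example (added commentary): when the source length is
   known, e.g.
     strcpy (dest, "abc");
   the code above emits memcpy (dest, "abc", 4), copying the terminating
   NUL as part of a fixed-size copy that later folding can inline. */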
1413 /* Fold a function call to builtin strncpy with arguments DEST, SRC, and LEN.
1414 Return false if no simplification can be made. */
1417 static bool
1418 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1419 tree dest, tree src, tree len)
1421 location_t loc = gimple_location (gsi_stmt (*gsi));
1422 tree fn;
1424 /* If the LEN parameter is zero, return DEST. */
1425 if (integer_zerop (len))
1427 replace_call_with_value (gsi, dest);
1428 return true;
1431 /* We can't compare slen with len as constants below if len is not a
1432 constant. */
1433 if (TREE_CODE (len) != INTEGER_CST)
1434 return false;
1436 /* Now, we must be passed a constant src ptr parameter. */
1437 tree slen = get_maxval_strlen (src, 0);
1438 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1439 return false;
1441 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1443 /* We do not support simplification of this case, though we do
1444 support it when expanding trees into RTL. */
1445 /* FIXME: generate a call to __builtin_memset. */
1446 if (tree_int_cst_lt (slen, len))
1447 return false;
1449 /* OK transform into builtin memcpy. */
1450 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1451 if (!fn)
1452 return false;
1454 len = fold_convert_loc (loc, size_type_node, len);
1455 len = force_gimple_operand_gsi (gsi, len, true,
1456 NULL_TREE, true, GSI_SAME_STMT);
1457 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1458 replace_call_with_call_and_fold (gsi, repl);
1459 return true;
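/* Illustrative example (added commentary): because the transformation
   requires LEN <= strlen (SRC) + 1, a call such as
     strncpy (dest, "abc", 4);
   becomes memcpy (dest, "abc", 4), whereas strncpy (dest, "abc", 8)
   is left alone, since it would also have to zero the trailing bytes. */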
1462 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1463 to the call.
1465 Return false if no simplification was possible; otherwise replace the
1466 call with a form that computes the same value in a more efficient
1467 manner (possibly including calls to other builtin functions) and
1468 return true. */
1480 static bool
1481 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1483 gimple stmt = gsi_stmt (*gsi);
1484 location_t loc = gimple_location (stmt);
1486 const char *p = c_getstr (src);
1488 /* If the string length is zero, return the dst parameter. */
1489 if (p && *p == '\0')
1491 replace_call_with_value (gsi, dst);
1492 return true;
1495 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1496 return false;
1498 /* See if we can store by pieces into (dst + strlen(dst)). */
1499 tree newdst;
1500 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1501 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1503 if (!strlen_fn || !memcpy_fn)
1504 return false;
1506 /* If the length of the source string isn't computable, don't
1507 split strcat into strlen and memcpy. */
1508 tree len = get_maxval_strlen (src, 0);
1509 if (! len)
1510 return false;
1512 /* Create strlen (dst). */
1513 gimple_seq stmts = NULL, stmts2;
1514 gimple repl = gimple_build_call (strlen_fn, 1, dst);
1515 gimple_set_location (repl, loc);
1516 if (gimple_in_ssa_p (cfun))
1517 newdst = make_ssa_name (size_type_node);
1518 else
1519 newdst = create_tmp_reg (size_type_node);
1520 gimple_call_set_lhs (repl, newdst);
1521 gimple_seq_add_stmt_without_update (&stmts, repl);
1523 /* Create (dst p+ strlen (dst)). */
1524 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1525 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1526 gimple_seq_add_seq_without_update (&stmts, stmts2);
1528 len = fold_convert_loc (loc, size_type_node, len);
1529 len = size_binop_loc (loc, PLUS_EXPR, len,
1530 build_int_cst (size_type_node, 1));
1531 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1532 gimple_seq_add_seq_without_update (&stmts, stmts2);
1534 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1535 gimple_seq_add_stmt_without_update (&stmts, repl);
1536 if (gimple_call_lhs (stmt))
1538 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1539 gimple_seq_add_stmt_without_update (&stmts, repl);
1540 gsi_replace_with_seq_vops (gsi, stmts);
1541 /* gsi now points at the assignment to the lhs, get a
1542 stmt iterator to the memcpy call.
1543 ??? We can't use gsi_for_stmt as that doesn't work when the
1544 CFG isn't built yet. */
1545 gimple_stmt_iterator gsi2 = *gsi;
1546 gsi_prev (&gsi2);
1547 fold_stmt (&gsi2);
1549 else
1551 gsi_replace_with_seq_vops (gsi, stmts);
1552 fold_stmt (gsi);
1554 return true;
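/* Illustrative example (added commentary): with a known source length,
     strcat (dst, "ab");
   is split by the code above into
     _1 = strlen (dst);
     _2 = dst + _1;
     memcpy (_2, "ab", 3);
   so the copy destination and size become explicit. */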
1557 /* Fold a call to the __strcat_chk builtin. DEST, SRC, and SIZE
1558 are the arguments to the call. */
1560 static bool
1561 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1563 gimple stmt = gsi_stmt (*gsi);
1564 tree dest = gimple_call_arg (stmt, 0);
1565 tree src = gimple_call_arg (stmt, 1);
1566 tree size = gimple_call_arg (stmt, 2);
1567 tree fn;
1568 const char *p;
1571 p = c_getstr (src);
1572 /* If the SRC parameter is "", return DEST. */
1573 if (p && *p == '\0')
1575 replace_call_with_value (gsi, dest);
1576 return true;
1579 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1580 return false;
1582 /* If __builtin_strcat_chk is used, assume strcat is available. */
1583 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1584 if (!fn)
1585 return false;
1587 gimple repl = gimple_build_call (fn, 2, dest, src);
1588 replace_call_with_call_and_fold (gsi, repl);
1589 return true;
1592 /* Simplify a call to the strncat builtin. */
1594 static bool
1595 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1597 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1598 tree dst = gimple_call_arg (stmt, 0);
1599 tree src = gimple_call_arg (stmt, 1);
1600 tree len = gimple_call_arg (stmt, 2);
1602 const char *p = c_getstr (src);
1604 /* If the requested length is zero, or the src parameter string
1605 length is zero, return the dst parameter. */
1606 if (integer_zerop (len) || (p && *p == '\0'))
1608 replace_call_with_value (gsi, dst);
1609 return true;
1612 /* If the requested len is greater than or equal to the string
1613 length, call strcat. */
1614 if (TREE_CODE (len) == INTEGER_CST && p
1615 && compare_tree_int (len, strlen (p)) >= 0)
1617 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1619 /* If the replacement _DECL isn't initialized, don't do the
1620 transformation. */
1621 if (!fn)
1622 return false;
1624 gcall *repl = gimple_build_call (fn, 2, dst, src);
1625 replace_call_with_call_and_fold (gsi, repl);
1626 return true;
1629 return false;
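/* Illustrative example (added commentary): when the bound is known to
   cover the whole source string, e.g.
     strncat (dst, "ab", 5);
   the code above rewrites the call to the cheaper strcat (dst, "ab"),
   since at most strlen ("ab") characters are appended anyway. */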
1632 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1633 LEN, and SIZE. */
1635 static bool
1636 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1638 gimple stmt = gsi_stmt (*gsi);
1639 tree dest = gimple_call_arg (stmt, 0);
1640 tree src = gimple_call_arg (stmt, 1);
1641 tree len = gimple_call_arg (stmt, 2);
1642 tree size = gimple_call_arg (stmt, 3);
1643 tree fn;
1644 const char *p;
1646 p = c_getstr (src);
1647 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1648 if ((p && *p == '\0')
1649 || integer_zerop (len))
1651 replace_call_with_value (gsi, dest);
1652 return true;
1655 if (! tree_fits_uhwi_p (size))
1656 return false;
1658 if (! integer_all_onesp (size))
1660 tree src_len = c_strlen (src, 1);
1661 if (src_len
1662 && tree_fits_uhwi_p (src_len)
1663 && tree_fits_uhwi_p (len)
1664 && ! tree_int_cst_lt (len, src_len))
1666 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1667 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1668 if (!fn)
1669 return false;
1671 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1672 replace_call_with_call_and_fold (gsi, repl);
1673 return true;
1675 return false;
1678 /* If __builtin_strncat_chk is used, assume strncat is available. */
1679 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1680 if (!fn)
1681 return false;
1683 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1684 replace_call_with_call_and_fold (gsi, repl);
1685 return true;
1688 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1689 to the call. UNLOCKED is true if this is actually a call to
1690 fputs_unlocked. Return false if no simplification
1691 was possible. */
1695 static bool
1696 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
1697 tree arg0, tree arg1,
1698 bool unlocked)
1700 gimple stmt = gsi_stmt (*gsi);
1702 /* If we're using an unlocked function, assume the other unlocked
1703 functions exist explicitly. */
1704 tree const fn_fputc = (unlocked
1705 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1706 : builtin_decl_implicit (BUILT_IN_FPUTC));
1707 tree const fn_fwrite = (unlocked
1708 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1709 : builtin_decl_implicit (BUILT_IN_FWRITE));
1711 /* If the return value is used, don't do the transformation. */
1712 if (gimple_call_lhs (stmt))
1713 return false;
1715 /* Get the length of the string passed to fputs. If the length
1716 can't be determined, punt. */
1717 tree len = get_maxval_strlen (arg0, 0);
1718 if (!len
1719 || TREE_CODE (len) != INTEGER_CST)
1720 return false;
1722 switch (compare_tree_int (len, 1))
1724 case -1: /* length is 0, delete the call entirely. */
1725 replace_call_with_value (gsi, integer_zero_node);
1726 return true;
1728 case 0: /* length is 1, call fputc. */
1730 const char *p = c_getstr (arg0);
1731 if (p != NULL)
1733 if (!fn_fputc)
1734 return false;
1736 gimple repl = gimple_build_call (fn_fputc, 2,
1737 build_int_cst
1738 (integer_type_node, p[0]), arg1);
1739 replace_call_with_call_and_fold (gsi, repl);
1740 return true;
1743 /* FALLTHROUGH */
1744 case 1: /* length is greater than 1, call fwrite. */
1746 /* If optimizing for size, keep fputs. */
1747 if (optimize_function_for_size_p (cfun))
1748 return false;
1749 /* New argument list transforming fputs(string, stream) to
1750 fwrite(string, 1, len, stream). */
1751 if (!fn_fwrite)
1752 return false;
1754 gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
1755 size_one_node, len, arg1);
1756 replace_call_with_call_and_fold (gsi, repl);
1757 return true;
1759 default:
1760 gcc_unreachable ();
1762 return false;
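/* Illustrative example (added commentary): when the string is known and
   the call's result is unused,
     fputs ("a", f);      becomes  fputc ('a', f);
     fputs ("hello", f);  becomes  fwrite ("hello", 1, 5, f);
   and fputs ("", f) is deleted entirely. */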
1765 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1766 DEST, SRC, LEN, and SIZE are the arguments to the call.
1767 FCODE is the BUILT_IN_* code of the builtin.
1768 Return false if no simplification can be made. */
1771 static bool
1772 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
1773 tree dest, tree src, tree len, tree size,
1774 enum built_in_function fcode)
1776 gimple stmt = gsi_stmt (*gsi);
1777 location_t loc = gimple_location (stmt);
1778 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1779 tree fn;
1781 /* If SRC and DEST are the same (and not volatile), return DEST
1782 (resp. DEST+LEN for __mempcpy_chk). */
1783 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1785 if (fcode != BUILT_IN_MEMPCPY_CHK)
1787 replace_call_with_value (gsi, dest);
1788 return true;
1790 else
1792 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
1793 temp = force_gimple_operand_gsi (gsi, temp,
1794 false, NULL_TREE, true,
1795 GSI_SAME_STMT);
1796 replace_call_with_value (gsi, temp);
1797 return true;
1801 if (! tree_fits_uhwi_p (size))
1802 return false;
1804 tree maxlen = get_maxval_strlen (len, 2);
1805 if (! integer_all_onesp (size))
1807 if (! tree_fits_uhwi_p (len))
1809 /* If LEN is not constant, try MAXLEN too.
1810 For MAXLEN only allow optimizing into non-_ocs function
1811 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1812 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1814 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1816 /* (void) __mempcpy_chk () can be optimized into
1817 (void) __memcpy_chk (). */
1818 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1819 if (!fn)
1820 return false;
1822 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1823 replace_call_with_call_and_fold (gsi, repl);
1824 return true;
1826 return false;
1829 else
1830 maxlen = len;
1832 if (tree_int_cst_lt (size, maxlen))
1833 return false;
1836 fn = NULL_TREE;
1837 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1838 mem{cpy,pcpy,move,set} is available. */
1839 switch (fcode)
1841 case BUILT_IN_MEMCPY_CHK:
1842 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1843 break;
1844 case BUILT_IN_MEMPCPY_CHK:
1845 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1846 break;
1847 case BUILT_IN_MEMMOVE_CHK:
1848 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1849 break;
1850 case BUILT_IN_MEMSET_CHK:
1851 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1852 break;
1853 default:
1854 break;
1857 if (!fn)
1858 return false;
1860 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1861 replace_call_with_call_and_fold (gsi, repl);
1862 return true;
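/* Illustrative example (added commentary): when the object size is
   unknown, glibc-style fortification passes (size_t) -1, so
     __memcpy_chk (dest, src, n, -1);
   is rewritten by the code above to the plain memcpy (dest, src, n);
   the same happens when SIZE is a constant known to be >= LEN. */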
1865 /* Fold a call to the __st[rp]cpy_chk builtin.
1866 DEST, SRC, and SIZE are the arguments to the call.
1867 FCODE is the BUILT_IN_* code of the builtin.
1868 Return false if no simplification can be made. */
1871 static bool
1872 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1873 tree dest,
1874 tree src, tree size,
1875 enum built_in_function fcode)
1877 gimple stmt = gsi_stmt (*gsi);
1878 location_t loc = gimple_location (stmt);
1879 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1880 tree len, fn;
1882 /* If SRC and DEST are the same (and not volatile), return DEST. */
1883 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1885 replace_call_with_value (gsi, dest);
1886 return true;
1889 if (! tree_fits_uhwi_p (size))
1890 return false;
1892 tree maxlen = get_maxval_strlen (src, 1);
1893 if (! integer_all_onesp (size))
1895 len = c_strlen (src, 1);
1896 if (! len || ! tree_fits_uhwi_p (len))
1898 /* If LEN is not constant, try MAXLEN too.
1899 For MAXLEN only allow optimizing into the non-_chk function
1900 if SIZE is >= MAXLEN; never convert to __chk_fail (). */
1901 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1903 if (fcode == BUILT_IN_STPCPY_CHK)
1905 if (! ignore)
1906 return false;
1908 /* If return value of __stpcpy_chk is ignored,
1909 optimize into __strcpy_chk. */
1910 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1911 if (!fn)
1912 return false;
1914 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1915 replace_call_with_call_and_fold (gsi, repl);
1916 return true;
1919 if (! len || TREE_SIDE_EFFECTS (len))
1920 return false;
1922 /* If c_strlen returned something, but not a constant,
1923 transform __strcpy_chk into __memcpy_chk. */
1924 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1925 if (!fn)
1926 return false;
1928 len = fold_convert_loc (loc, size_type_node, len);
1929 len = size_binop_loc (loc, PLUS_EXPR, len,
1930 build_int_cst (size_type_node, 1));
1931 len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
1932 true, GSI_SAME_STMT);
1933 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1934 replace_call_with_call_and_fold (gsi, repl);
1935 return true;
1938 else
1939 maxlen = len;
1941 if (! tree_int_cst_lt (maxlen, size))
1942 return false;
1945 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1946 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1947 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1948 if (!fn)
1949 return false;
1951 gimple repl = gimple_build_call (fn, 2, dest, src);
1952 replace_call_with_call_and_fold (gsi, repl);
1953 return true;
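/* A hypothetical sketch of the folds above: with a constant source,

       __builtin___strcpy_chk (dst, "abc", 8);

   has strlen ("abc") == 3 < SIZE (8) and becomes
   __builtin_strcpy (dst, "abc"); with a non-constant but
   side-effect-free length LEN from c_strlen, the call becomes
   __builtin___memcpy_chk (dst, src, LEN + 1, 8) instead.  */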
1956 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1957 are the arguments to the call. FCODE is the BUILT_IN_* code of the
1958 builtin. Whether the result is ignored and the maximum length are
1959 determined from the call statement itself. */
1961 static bool
1962 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1963 tree dest, tree src,
1964 tree len, tree size,
1965 enum built_in_function fcode)
1967 gimple stmt = gsi_stmt (*gsi);
1968 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1969 tree fn;
1971 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
1973 /* If return value of __stpncpy_chk is ignored,
1974 optimize into __strncpy_chk. */
1975 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
1976 if (fn)
1978 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1979 replace_call_with_call_and_fold (gsi, repl);
1980 return true;
1984 if (! tree_fits_uhwi_p (size))
1985 return false;
1987 tree maxlen = get_maxval_strlen (len, 2);
1988 if (! integer_all_onesp (size))
1990 if (! tree_fits_uhwi_p (len))
1992 /* If LEN is not constant, try MAXLEN too.
1993 For MAXLEN only allow optimizing into the non-_chk function
1994 if SIZE is >= MAXLEN; never convert to __chk_fail (). */
1995 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1996 return false;
1998 else
1999 maxlen = len;
2001 if (tree_int_cst_lt (size, maxlen))
2002 return false;
2005 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2006 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2007 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2008 if (!fn)
2009 return false;
2011 gimple repl = gimple_build_call (fn, 3, dest, src, len);
2012 replace_call_with_call_and_fold (gsi, repl);
2013 return true;
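/* A hypothetical sketch: __builtin___strncpy_chk (dst, src, 4, 8)
   has SIZE (8) >= LEN (4) and folds to
   __builtin_strncpy (dst, src, 4); a (void) __builtin___stpncpy_chk
   call is first downgraded to __builtin___strncpy_chk because its
   return value is unused.  */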
2016 /* Fold a call to the stpcpy builtin with arguments DEST and SRC.
2017 Return true if a simplification was made, false otherwise. */
2019 static bool
2020 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2022 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2023 location_t loc = gimple_location (stmt);
2024 tree dest = gimple_call_arg (stmt, 0);
2025 tree src = gimple_call_arg (stmt, 1);
2026 tree fn, len, lenp1;
2028 /* If the result is unused, replace stpcpy with strcpy. */
2029 if (gimple_call_lhs (stmt) == NULL_TREE)
2031 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2032 if (!fn)
2033 return false;
2034 gimple_call_set_fndecl (stmt, fn);
2035 fold_stmt (gsi);
2036 return true;
2039 len = c_strlen (src, 1);
2040 if (!len
2041 || TREE_CODE (len) != INTEGER_CST)
2042 return false;
2044 if (optimize_function_for_size_p (cfun)
2045 /* If length is zero it's small enough. */
2046 && !integer_zerop (len))
2047 return false;
2049 /* If the source has a known length replace stpcpy with memcpy. */
2050 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2051 if (!fn)
2052 return false;
2054 gimple_seq stmts = NULL;
2055 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2056 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2057 tem, build_int_cst (size_type_node, 1));
2058 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2059 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2060 gimple_set_vuse (repl, gimple_vuse (stmt));
2061 gimple_set_vdef (repl, gimple_vdef (stmt));
2062 if (gimple_vdef (repl)
2063 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2064 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2065 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2066 /* Replace the result with dest + len. */
2067 stmts = NULL;
2068 tem = gimple_convert (&stmts, loc, sizetype, len);
2069 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2070 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2071 POINTER_PLUS_EXPR, dest, tem);
2072 gsi_replace (gsi, ret, true);
2073 /* Finally fold the memcpy call. */
2074 gimple_stmt_iterator gsi2 = *gsi;
2075 gsi_prev (&gsi2);
2076 fold_stmt (&gsi2);
2077 return true;
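/* A sketch of the net effect for a constant-length source
   (identifiers are illustrative):

       p = __builtin_stpcpy (dst, "abcd");

   becomes

       __builtin_memcpy (dst, "abcd", 5);
       p = dst + 4;

   i.e. a copy of LEN + 1 bytes followed by the result DEST + LEN.  */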
2080 /* Fold a call to the __{,v}snprintf_chk builtin at *GSI. Return false
2081 if a normal call should be emitted rather than expanding the function
2082 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2083 BUILT_IN_VSNPRINTF_CHK. */
2086 static bool
2087 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2088 enum built_in_function fcode)
2090 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2091 tree dest, size, len, fn, fmt, flag;
2092 const char *fmt_str;
2094 /* Verify the required arguments in the original call. */
2095 if (gimple_call_num_args (stmt) < 5)
2096 return false;
2098 dest = gimple_call_arg (stmt, 0);
2099 len = gimple_call_arg (stmt, 1);
2100 flag = gimple_call_arg (stmt, 2);
2101 size = gimple_call_arg (stmt, 3);
2102 fmt = gimple_call_arg (stmt, 4);
2104 if (! tree_fits_uhwi_p (size))
2105 return false;
2107 if (! integer_all_onesp (size))
2109 tree maxlen = get_maxval_strlen (len, 2);
2110 if (! tree_fits_uhwi_p (len))
2112 /* If LEN is not constant, try MAXLEN too.
2113 For MAXLEN only allow optimizing into the non-_chk function
2114 if SIZE is >= MAXLEN; never convert to __chk_fail (). */
2115 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2116 return false;
2118 else
2119 maxlen = len;
2121 if (tree_int_cst_lt (size, maxlen))
2122 return false;
2125 if (!init_target_chars ())
2126 return false;
2128 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2129 or if format doesn't contain % chars or is "%s". */
2130 if (! integer_zerop (flag))
2132 fmt_str = c_getstr (fmt);
2133 if (fmt_str == NULL)
2134 return false;
2135 if (strchr (fmt_str, target_percent) != NULL
2136 && strcmp (fmt_str, target_percent_s))
2137 return false;
2140 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2141 available. */
2142 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2143 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2144 if (!fn)
2145 return false;
2147 /* Replace the called function and the first 5 arguments by 3,
2148 retaining the trailing varargs. */
2149 gimple_call_set_fndecl (stmt, fn);
2150 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2151 gimple_call_set_arg (stmt, 0, dest);
2152 gimple_call_set_arg (stmt, 1, len);
2153 gimple_call_set_arg (stmt, 2, fmt);
2154 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2155 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2156 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2157 fold_stmt (gsi);
2158 return true;
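/* A hypothetical sketch: with FLAG zero and a constant LEN no larger
   than the object SIZE,

       __builtin___snprintf_chk (buf, 8, 0, 32, "%d", x);

   drops the FLAG and SIZE arguments and becomes

       __builtin_snprintf (buf, 8, "%d", x);  */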
2161 /* Fold a call to the __{,v}sprintf_chk builtin at *GSI. Return false
2162 if a normal call should be emitted rather than expanding the function
2163 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
2166 static bool
2167 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2168 enum built_in_function fcode)
2170 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2171 tree dest, size, len, fn, fmt, flag;
2172 const char *fmt_str;
2173 unsigned nargs = gimple_call_num_args (stmt);
2175 /* Verify the required arguments in the original call. */
2176 if (nargs < 4)
2177 return false;
2178 dest = gimple_call_arg (stmt, 0);
2179 flag = gimple_call_arg (stmt, 1);
2180 size = gimple_call_arg (stmt, 2);
2181 fmt = gimple_call_arg (stmt, 3);
2183 if (! tree_fits_uhwi_p (size))
2184 return false;
2186 len = NULL_TREE;
2188 if (!init_target_chars ())
2189 return false;
2191 /* Check whether the format is a literal string constant. */
2192 fmt_str = c_getstr (fmt);
2193 if (fmt_str != NULL)
2195 /* If the format doesn't contain % args or %%, we know the size. */
2196 if (strchr (fmt_str, target_percent) == 0)
2198 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2199 len = build_int_cstu (size_type_node, strlen (fmt_str));
2201 /* If the format is "%s" and first ... argument is a string literal,
2202 we know the size too. */
2203 else if (fcode == BUILT_IN_SPRINTF_CHK
2204 && strcmp (fmt_str, target_percent_s) == 0)
2206 tree arg;
2208 if (nargs == 5)
2210 arg = gimple_call_arg (stmt, 4);
2211 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2213 len = c_strlen (arg, 1);
2214 if (! len || ! tree_fits_uhwi_p (len))
2215 len = NULL_TREE;
2221 if (! integer_all_onesp (size))
2223 if (! len || ! tree_int_cst_lt (len, size))
2224 return false;
2227 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2228 or if format doesn't contain % chars or is "%s". */
2229 if (! integer_zerop (flag))
2231 if (fmt_str == NULL)
2232 return false;
2233 if (strchr (fmt_str, target_percent) != NULL
2234 && strcmp (fmt_str, target_percent_s))
2235 return false;
2238 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2239 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2240 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2241 if (!fn)
2242 return false;
2244 /* Replace the called function and the first 4 arguments by 2,
2245 retaining the trailing varargs. */
2246 gimple_call_set_fndecl (stmt, fn);
2247 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2248 gimple_call_set_arg (stmt, 0, dest);
2249 gimple_call_set_arg (stmt, 1, fmt);
2250 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2251 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2252 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2253 fold_stmt (gsi);
2254 return true;
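/* A hypothetical sketch: for a %-free literal format whose length is
   known to be smaller than SIZE,

       __builtin___sprintf_chk (buf, 0, 16, "hello");

   becomes __builtin_sprintf (buf, "hello"), dropping FLAG and SIZE.  */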
2257 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2258 ORIG may be null if this is a 2-argument call. We don't attempt to
2259 simplify calls with more than 3 arguments.
2261 Return true if a simplification was made, false otherwise. */
2265 static bool
2266 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2268 gimple stmt = gsi_stmt (*gsi);
2269 tree dest = gimple_call_arg (stmt, 0);
2270 tree fmt = gimple_call_arg (stmt, 1);
2271 tree orig = NULL_TREE;
2272 const char *fmt_str = NULL;
2274 /* Verify the required arguments in the original call. We deal with two
2275 types of sprintf() calls: 'sprintf (str, fmt)' and
2276 'sprintf (dest, "%s", orig)'. */
2277 if (gimple_call_num_args (stmt) > 3)
2278 return false;
2280 if (gimple_call_num_args (stmt) == 3)
2281 orig = gimple_call_arg (stmt, 2);
2283 /* Check whether the format is a literal string constant. */
2284 fmt_str = c_getstr (fmt);
2285 if (fmt_str == NULL)
2286 return false;
2288 if (!init_target_chars ())
2289 return false;
2291 /* If the format doesn't contain % args or %%, use strcpy. */
2292 if (strchr (fmt_str, target_percent) == NULL)
2294 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2296 if (!fn)
2297 return false;
2299 /* Don't optimize sprintf (buf, "abc", ptr++). */
2300 if (orig)
2301 return false;
2303 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2304 'format' is known to contain no % formats. */
2305 gimple_seq stmts = NULL;
2306 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2307 gimple_seq_add_stmt_without_update (&stmts, repl);
2308 if (gimple_call_lhs (stmt))
2310 repl = gimple_build_assign (gimple_call_lhs (stmt),
2311 build_int_cst (integer_type_node,
2312 strlen (fmt_str)));
2313 gimple_seq_add_stmt_without_update (&stmts, repl);
2314 gsi_replace_with_seq_vops (gsi, stmts);
2315 /* gsi now points at the assignment to the lhs, get a
2316 stmt iterator to the strcpy call.
2317 ??? We can't use gsi_for_stmt as that doesn't work when the
2318 CFG isn't built yet. */
2319 gimple_stmt_iterator gsi2 = *gsi;
2320 gsi_prev (&gsi2);
2321 fold_stmt (&gsi2);
2323 else
2325 gsi_replace_with_seq_vops (gsi, stmts);
2326 fold_stmt (gsi);
2328 return true;
2331 /* If the format is "%s", use strcpy if the result isn't used. */
2332 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2334 tree fn;
2335 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2337 if (!fn)
2338 return false;
2340 /* Don't crash on sprintf (str1, "%s"). */
2341 if (!orig)
2342 return false;
2344 tree orig_len = NULL_TREE;
2345 if (gimple_call_lhs (stmt))
2347 orig_len = get_maxval_strlen (orig, 0);
2348 if (!orig_len)
2349 return false;
2352 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2353 gimple_seq stmts = NULL;
2354 gimple repl = gimple_build_call (fn, 2, dest, orig);
2355 gimple_seq_add_stmt_without_update (&stmts, repl);
2356 if (gimple_call_lhs (stmt))
2358 if (!useless_type_conversion_p (integer_type_node,
2359 TREE_TYPE (orig_len)))
2360 orig_len = fold_convert (integer_type_node, orig_len);
2361 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2362 gimple_seq_add_stmt_without_update (&stmts, repl);
2363 gsi_replace_with_seq_vops (gsi, stmts);
2364 /* gsi now points at the assignment to the lhs, get a
2365 stmt iterator to the strcpy call.
2366 ??? We can't use gsi_for_stmt as that doesn't work when the
2367 CFG isn't built yet. */
2368 gimple_stmt_iterator gsi2 = *gsi;
2369 gsi_prev (&gsi2);
2370 fold_stmt (&gsi2);
2372 else
2374 gsi_replace_with_seq_vops (gsi, stmts);
2375 fold_stmt (gsi);
2377 return true;
2379 return false;
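/* Hypothetical examples of the folds above: sprintf (buf, "hello")
   becomes strcpy (buf, "hello"), with a used return value replaced by
   the constant 5; sprintf (buf, "%s", s) becomes strcpy (buf, s),
   which additionally requires get_maxval_strlen to know the length of
   S whenever the return value is used.  */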
2382 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2383 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2384 attempt to simplify calls with more than 4 arguments.
2386 Return true if a simplification was made, false otherwise. */
2390 static bool
2391 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2393 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2394 tree dest = gimple_call_arg (stmt, 0);
2395 tree destsize = gimple_call_arg (stmt, 1);
2396 tree fmt = gimple_call_arg (stmt, 2);
2397 tree orig = NULL_TREE;
2398 const char *fmt_str = NULL;
2400 if (gimple_call_num_args (stmt) > 4)
2401 return false;
2403 if (gimple_call_num_args (stmt) == 4)
2404 orig = gimple_call_arg (stmt, 3);
2406 if (!tree_fits_uhwi_p (destsize))
2407 return false;
2408 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2410 /* Check whether the format is a literal string constant. */
2411 fmt_str = c_getstr (fmt);
2412 if (fmt_str == NULL)
2413 return false;
2415 if (!init_target_chars ())
2416 return false;
2418 /* If the format doesn't contain % args or %%, use strcpy. */
2419 if (strchr (fmt_str, target_percent) == NULL)
2421 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2422 if (!fn)
2423 return false;
2425 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2426 if (orig)
2427 return false;
2429 /* We could expand this as
2430 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2431 or to
2432 memcpy (str, fmt_with_nul_at_cstm1, cst);
2433 but in the former case that might increase code size
2434 and in the latter case grow .rodata section too much.
2435 So punt for now. */
2436 size_t len = strlen (fmt_str);
2437 if (len >= destlen)
2438 return false;
2440 gimple_seq stmts = NULL;
2441 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2442 gimple_seq_add_stmt_without_update (&stmts, repl);
2443 if (gimple_call_lhs (stmt))
2445 repl = gimple_build_assign (gimple_call_lhs (stmt),
2446 build_int_cst (integer_type_node, len));
2447 gimple_seq_add_stmt_without_update (&stmts, repl);
2448 gsi_replace_with_seq_vops (gsi, stmts);
2449 /* gsi now points at the assignment to the lhs, get a
2450 stmt iterator to the strcpy call.
2451 ??? We can't use gsi_for_stmt as that doesn't work when the
2452 CFG isn't built yet. */
2453 gimple_stmt_iterator gsi2 = *gsi;
2454 gsi_prev (&gsi2);
2455 fold_stmt (&gsi2);
2457 else
2459 gsi_replace_with_seq_vops (gsi, stmts);
2460 fold_stmt (gsi);
2462 return true;
2465 /* If the format is "%s", use strcpy if the result isn't used. */
2466 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2468 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2469 if (!fn)
2470 return false;
2472 /* Don't crash on snprintf (str1, cst, "%s"). */
2473 if (!orig)
2474 return false;
2476 tree orig_len = get_maxval_strlen (orig, 0);
2477 if (!orig_len)
2478 return false;
2480 /* We could expand this as
2481 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2482 or to
2483 memcpy (str1, str2_with_nul_at_cstm1, cst);
2484 but in the former case that might increase code size
2485 and in the latter case grow .rodata section too much.
2486 So punt for now. */
2487 if (compare_tree_int (orig_len, destlen) >= 0)
2488 return false;
2490 /* Convert snprintf (str1, cst, "%s", str2) into
2491 strcpy (str1, str2) if strlen (str2) < cst. */
2492 gimple_seq stmts = NULL;
2493 gimple repl = gimple_build_call (fn, 2, dest, orig);
2494 gimple_seq_add_stmt_without_update (&stmts, repl);
2495 if (gimple_call_lhs (stmt))
2497 if (!useless_type_conversion_p (integer_type_node,
2498 TREE_TYPE (orig_len)))
2499 orig_len = fold_convert (integer_type_node, orig_len);
2500 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2501 gimple_seq_add_stmt_without_update (&stmts, repl);
2502 gsi_replace_with_seq_vops (gsi, stmts);
2503 /* gsi now points at the assignment to the lhs, get a
2504 stmt iterator to the strcpy call.
2505 ??? We can't use gsi_for_stmt as that doesn't work when the
2506 CFG isn't built yet. */
2507 gimple_stmt_iterator gsi2 = *gsi;
2508 gsi_prev (&gsi2);
2509 fold_stmt (&gsi2);
2511 else
2513 gsi_replace_with_seq_vops (gsi, stmts);
2514 fold_stmt (gsi);
2516 return true;
2518 return false;
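/* A hypothetical sketch: snprintf (buf, 8, "%s", "abc") has known
   source length 3 < DESTSIZE (8), so no truncation can occur and the
   call becomes strcpy (buf, "abc"), with a used return value replaced
   by the constant 3.  */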
2521 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
2522 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2523 more than 3 arguments, and ARG may be null in the 2-argument case.
2525 Return true if a simplification was made, false otherwise. FCODE is
2526 the BUILT_IN_* code of the function to be simplified. */
2529 static bool
2530 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2531 tree fp, tree fmt, tree arg,
2532 enum built_in_function fcode)
2534 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2535 tree fn_fputc, fn_fputs;
2536 const char *fmt_str = NULL;
2538 /* If the return value is used, don't do the transformation. */
2539 if (gimple_call_lhs (stmt) != NULL_TREE)
2540 return false;
2542 /* Check whether the format is a literal string constant. */
2543 fmt_str = c_getstr (fmt);
2544 if (fmt_str == NULL)
2545 return false;
2547 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2549 /* If we're using an unlocked function, assume the other
2550 unlocked functions exist explicitly. */
2551 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2552 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2554 else
2556 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2557 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2560 if (!init_target_chars ())
2561 return false;
2563 /* If the format doesn't contain % args or %%, fold to fputs. */
2564 if (strchr (fmt_str, target_percent) == NULL)
2566 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2567 && arg)
2568 return false;
2570 /* If the format specifier was "", fprintf does nothing. */
2571 if (fmt_str[0] == '\0')
2573 replace_call_with_value (gsi, NULL_TREE);
2574 return true;
2577 /* When "string" doesn't contain %, replace all cases of
2578 fprintf (fp, string) with fputs (string, fp). The fputs
2579 builtin will take care of special cases like length == 1. */
2580 if (fn_fputs)
2582 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2583 replace_call_with_call_and_fold (gsi, repl);
2584 return true;
2588 /* The other optimizations can be done only on the non-va_list variants. */
2589 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2590 return false;
2592 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2593 else if (strcmp (fmt_str, target_percent_s) == 0)
2595 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2596 return false;
2597 if (fn_fputs)
2599 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2600 replace_call_with_call_and_fold (gsi, repl);
2601 return true;
2605 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2606 else if (strcmp (fmt_str, target_percent_c) == 0)
2608 if (!arg
2609 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2610 return false;
2611 if (fn_fputc)
2613 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2614 replace_call_with_call_and_fold (gsi, repl);
2615 return true;
2619 return false;
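/* Hypothetical examples, all valid only when the return value is
   unused: fprintf (fp, "hello") becomes fputs ("hello", fp),
   fprintf (fp, "%s", s) becomes fputs (s, fp), and
   fprintf (fp, "%c", c) becomes fputc (c, fp).  */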
2622 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2623 FMT and ARG are the arguments to the call; we don't fold cases with
2624 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2626 Return true if a simplification was made, false otherwise. FCODE is
2627 the BUILT_IN_* code of the function to be simplified. */
2630 static bool
2631 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2632 tree arg, enum built_in_function fcode)
2634 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2635 tree fn_putchar, fn_puts, newarg;
2636 const char *fmt_str = NULL;
2638 /* If the return value is used, don't do the transformation. */
2639 if (gimple_call_lhs (stmt) != NULL_TREE)
2640 return false;
2642 /* Check whether the format is a literal string constant. */
2643 fmt_str = c_getstr (fmt);
2644 if (fmt_str == NULL)
2645 return false;
2647 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2649 /* If we're using an unlocked function, assume the other
2650 unlocked functions exist explicitly. */
2651 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2652 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2654 else
2656 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2657 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2660 if (!init_target_chars ())
2661 return false;
2663 if (strcmp (fmt_str, target_percent_s) == 0
2664 || strchr (fmt_str, target_percent) == NULL)
2666 const char *str;
2668 if (strcmp (fmt_str, target_percent_s) == 0)
2670 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2671 return false;
2673 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2674 return false;
2676 str = c_getstr (arg);
2677 if (str == NULL)
2678 return false;
2680 else
2682 /* The format specifier doesn't contain any '%' characters. */
2683 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2684 && arg)
2685 return false;
2686 str = fmt_str;
2689 /* If the string was "", printf does nothing. */
2690 if (str[0] == '\0')
2692 replace_call_with_value (gsi, NULL_TREE);
2693 return true;
2696 /* If the string has length of 1, call putchar. */
2697 if (str[1] == '\0')
2699 /* Given printf ("c") (where c is any one character),
2700 convert "c"[0] to an int and pass that to the replacement
2701 function. */
2702 newarg = build_int_cst (integer_type_node, str[0]);
2703 if (fn_putchar)
2705 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2706 replace_call_with_call_and_fold (gsi, repl);
2707 return true;
2710 else
2712 /* If the string was "string\n", call puts("string"). */
2713 size_t len = strlen (str);
2714 if ((unsigned char)str[len - 1] == target_newline
2715 && (size_t) (int) len == len
2716 && (int) len > 0)
2718 char *newstr;
2719 tree offset_node, string_cst;
2721 /* Create a NUL-terminated string that's one char shorter
2722 than the original, stripping off the trailing '\n'. */
2723 newarg = build_string_literal (len, str);
2724 string_cst = string_constant (newarg, &offset_node);
2725 gcc_checking_assert (string_cst
2726 && (TREE_STRING_LENGTH (string_cst)
2727 == (int) len)
2728 && integer_zerop (offset_node)
2729 && (unsigned char)
2730 TREE_STRING_POINTER (string_cst)[len - 1]
2731 == target_newline);
2732 /* build_string_literal creates a new STRING_CST,
2733 modify it in place to avoid double copying. */
2734 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2735 newstr[len - 1] = '\0';
2736 if (fn_puts)
2738 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2739 replace_call_with_call_and_fold (gsi, repl);
2740 return true;
2743 else
2744 /* We'd like to arrange to call fputs(string,stdout) here,
2745 but we need stdout and don't have a way to get it yet. */
2746 return false;
2750 /* The other optimizations can be done only on the non-va_list variants. */
2751 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2752 return false;
2754 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2755 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2757 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2758 return false;
2759 if (fn_puts)
2761 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2762 replace_call_with_call_and_fold (gsi, repl);
2763 return true;
2767 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2768 else if (strcmp (fmt_str, target_percent_c) == 0)
2770 if (!arg || ! useless_type_conversion_p (integer_type_node,
2771 TREE_TYPE (arg)))
2772 return false;
2773 if (fn_putchar)
2775 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2776 replace_call_with_call_and_fold (gsi, repl);
2777 return true;
2781 return false;
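/* Hypothetical examples, all valid only when the return value is
   unused: printf ("x") becomes putchar ('x'),
   printf ("hello\n") becomes puts ("hello"), and
   printf ("%s\n", s) becomes puts (s). A %-free string lacking a
   trailing newline would need fputs (..., stdout), which is not
   available here, so it is left alone.  */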
2786 /* Fold a call to __builtin_strlen if the maximum length of its argument can be computed. */
2788 static bool
2789 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2791 gimple stmt = gsi_stmt (*gsi);
2792 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2793 if (!len)
2794 return false;
2795 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2796 replace_call_with_value (gsi, len);
2797 return true;
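/* A minimal sketch: for n = __builtin_strlen (s), where
   get_maxval_strlen can compute the length of S (e.g. s = "abc"),
   the call is replaced by the constant 3.  */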
2801 /* Fold the non-target builtin at *GSI and return whether any simplification
2802 was made. */
2804 static bool
2805 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2807 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2808 tree callee = gimple_call_fndecl (stmt);
2810 /* Give up for always_inline inline builtins until they are
2811 inlined. */
2812 if (avoid_folding_inline_builtin (callee))
2813 return false;
2815 unsigned n = gimple_call_num_args (stmt);
2816 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2817 switch (fcode)
2819 case BUILT_IN_BZERO:
2820 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2821 gimple_call_arg (stmt, 1));
2822 case BUILT_IN_MEMSET:
2823 return gimple_fold_builtin_memset (gsi,
2824 gimple_call_arg (stmt, 1),
2825 gimple_call_arg (stmt, 2));
2826 case BUILT_IN_BCOPY:
2827 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2828 gimple_call_arg (stmt, 0), 3);
2829 case BUILT_IN_MEMCPY:
2830 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2831 gimple_call_arg (stmt, 1), 0);
2832 case BUILT_IN_MEMPCPY:
2833 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2834 gimple_call_arg (stmt, 1), 1);
2835 case BUILT_IN_MEMMOVE:
2836 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2837 gimple_call_arg (stmt, 1), 3);
2838 case BUILT_IN_SPRINTF_CHK:
2839 case BUILT_IN_VSPRINTF_CHK:
2840 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2841 case BUILT_IN_STRCAT_CHK:
2842 return gimple_fold_builtin_strcat_chk (gsi);
2843 case BUILT_IN_STRNCAT_CHK:
2844 return gimple_fold_builtin_strncat_chk (gsi);
2845 case BUILT_IN_STRLEN:
2846 return gimple_fold_builtin_strlen (gsi);
2847 case BUILT_IN_STRCPY:
2848 return gimple_fold_builtin_strcpy (gsi,
2849 gimple_call_arg (stmt, 0),
2850 gimple_call_arg (stmt, 1));
2851 case BUILT_IN_STRNCPY:
2852 return gimple_fold_builtin_strncpy (gsi,
2853 gimple_call_arg (stmt, 0),
2854 gimple_call_arg (stmt, 1),
2855 gimple_call_arg (stmt, 2));
2856 case BUILT_IN_STRCAT:
2857 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2858 gimple_call_arg (stmt, 1));
2859 case BUILT_IN_STRNCAT:
2860 return gimple_fold_builtin_strncat (gsi);
2861 case BUILT_IN_FPUTS:
2862 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2863 gimple_call_arg (stmt, 1), false);
2864 case BUILT_IN_FPUTS_UNLOCKED:
2865 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2866 gimple_call_arg (stmt, 1), true);
2867 case BUILT_IN_MEMCPY_CHK:
2868 case BUILT_IN_MEMPCPY_CHK:
2869 case BUILT_IN_MEMMOVE_CHK:
2870 case BUILT_IN_MEMSET_CHK:
2871 return gimple_fold_builtin_memory_chk (gsi,
2872 gimple_call_arg (stmt, 0),
2873 gimple_call_arg (stmt, 1),
2874 gimple_call_arg (stmt, 2),
2875 gimple_call_arg (stmt, 3),
2876 fcode);
2877 case BUILT_IN_STPCPY:
2878 return gimple_fold_builtin_stpcpy (gsi);
2879 case BUILT_IN_STRCPY_CHK:
2880 case BUILT_IN_STPCPY_CHK:
2881 return gimple_fold_builtin_stxcpy_chk (gsi,
2882 gimple_call_arg (stmt, 0),
2883 gimple_call_arg (stmt, 1),
2884 gimple_call_arg (stmt, 2),
2885 fcode);
2886 case BUILT_IN_STRNCPY_CHK:
2887 case BUILT_IN_STPNCPY_CHK:
2888 return gimple_fold_builtin_stxncpy_chk (gsi,
2889 gimple_call_arg (stmt, 0),
2890 gimple_call_arg (stmt, 1),
2891 gimple_call_arg (stmt, 2),
2892 gimple_call_arg (stmt, 3),
2893 fcode);
2894 case BUILT_IN_SNPRINTF_CHK:
2895 case BUILT_IN_VSNPRINTF_CHK:
2896 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
2897 case BUILT_IN_SNPRINTF:
2898 return gimple_fold_builtin_snprintf (gsi);
2899 case BUILT_IN_SPRINTF:
2900 return gimple_fold_builtin_sprintf (gsi);
2901 case BUILT_IN_FPRINTF:
2902 case BUILT_IN_FPRINTF_UNLOCKED:
2903 case BUILT_IN_VFPRINTF:
2904 if (n == 2 || n == 3)
2905 return gimple_fold_builtin_fprintf (gsi,
2906 gimple_call_arg (stmt, 0),
2907 gimple_call_arg (stmt, 1),
2908 n == 3
2909 ? gimple_call_arg (stmt, 2)
2910 : NULL_TREE,
2911 fcode);
2912 break;
2913 case BUILT_IN_FPRINTF_CHK:
2914 case BUILT_IN_VFPRINTF_CHK:
2915 if (n == 3 || n == 4)
2916 return gimple_fold_builtin_fprintf (gsi,
2917 gimple_call_arg (stmt, 0),
2918 gimple_call_arg (stmt, 2),
2919 n == 4
2920 ? gimple_call_arg (stmt, 3)
2921 : NULL_TREE,
2922 fcode);
2923 break;
2924 case BUILT_IN_PRINTF:
2925 case BUILT_IN_PRINTF_UNLOCKED:
2926 case BUILT_IN_VPRINTF:
2927 if (n == 1 || n == 2)
2928 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2929 n == 2
2930 ? gimple_call_arg (stmt, 1)
2931 : NULL_TREE, fcode);
2932 break;
2933 case BUILT_IN_PRINTF_CHK:
2934 case BUILT_IN_VPRINTF_CHK:
2935 if (n == 2 || n == 3)
2936 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2937 n == 3
2938 ? gimple_call_arg (stmt, 2)
2939 : NULL_TREE, fcode);
2940 default:;
2943 /* Try the generic builtin folder. */
2944 bool ignore = (gimple_call_lhs (stmt) == NULL);
2945 tree result = fold_call_stmt (stmt, ignore);
2946 if (result)
2948 if (ignore)
2949 STRIP_NOPS (result);
2950 else
2951 result = fold_convert (gimple_call_return_type (stmt), result);
2952 if (!update_call_from_tree (gsi, result))
2953 gimplify_and_update_call_from_tree (gsi, result);
2954 return true;
2957 return false;
2960 /* Return true if the result of ARG0 CODE ARG1, computed in infinite
2961 signed precision, doesn't fit into TYPE. The overflow test is done
2962 regardless of -fwrapv, and also for unsigned types. */
2964 bool
2965 arith_overflowed_p (enum tree_code code, const_tree type,
2966 const_tree arg0, const_tree arg1)
2968 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
2969 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
2970 widest2_int_cst;
2971 widest2_int warg0 = widest2_int_cst (arg0);
2972 widest2_int warg1 = widest2_int_cst (arg1);
2973 widest2_int wres;
2974 switch (code)
2976 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
2977 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
2978 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
2979 default: gcc_unreachable ();
2981 signop sign = TYPE_SIGN (type);
2982 if (sign == UNSIGNED && wi::neg_p (wres))
2983 return true;
2984 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
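/* A worked example with hypothetical operands: for an 8-bit unsigned
   TYPE, 200 + 100 = 300 needs 9 value bits, so wi::min_precision
   exceeds TYPE_PRECISION and the function returns true; likewise
   5 - 10 is negative and can never fit an UNSIGNED type.  */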
2987 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2988 The statement may be replaced by another statement, e.g., if the call
2989 simplifies to a constant value. Return true if any changes were made.
2990 It is assumed that the operands have been previously folded. */
2992 static bool
2993 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
2995 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2996 tree callee;
2997 bool changed = false;
2998 unsigned i;
3000 /* Fold *& in call arguments. */
3001 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3002 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3004 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3005 if (tmp)
3007 gimple_call_set_arg (stmt, i, tmp);
3008 changed = true;
3012 /* Check for virtual calls that became direct calls. */
3013 callee = gimple_call_fn (stmt);
3014 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3016 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3018 if (dump_file && virtual_method_call_p (callee)
3019 && !possible_polymorphic_call_target_p
3020 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3021 (OBJ_TYPE_REF_EXPR (callee)))))
3023 fprintf (dump_file,
3024 "Type inheritance inconsistent devirtualization of ");
3025 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3026 fprintf (dump_file, " to ");
3027 print_generic_expr (dump_file, callee, TDF_SLIM);
3028 fprintf (dump_file, "\n");
3031 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3032 changed = true;
3034 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3036 bool final;
3037 vec <cgraph_node *>targets
3038 = possible_polymorphic_call_targets (callee, stmt, &final);
3039 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3041 tree lhs = gimple_call_lhs (stmt);
3042 if (dump_enabled_p ())
3044 location_t loc = gimple_location_safe (stmt);
3045 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3046 "folding virtual function call to %s\n",
3047 targets.length () == 1
3048 ? targets[0]->name ()
3049 : "__builtin_unreachable");
3051 if (targets.length () == 1)
3053 gimple_call_set_fndecl (stmt, targets[0]->decl);
3054 changed = true;
3055 /* If the call becomes noreturn, remove the lhs. */
3056 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
3058 if (TREE_CODE (lhs) == SSA_NAME)
3060 tree var = create_tmp_var (TREE_TYPE (lhs));
3061 tree def = get_or_create_ssa_default_def (cfun, var);
3062 gimple new_stmt = gimple_build_assign (lhs, def);
3063 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3065 gimple_call_set_lhs (stmt, NULL_TREE);
3067 maybe_remove_unused_call_args (cfun, stmt);
3069 else
3071 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3072 gimple new_stmt = gimple_build_call (fndecl, 0);
3073 gimple_set_location (new_stmt, gimple_location (stmt));
3074 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3076 tree var = create_tmp_var (TREE_TYPE (lhs));
3077 tree def = get_or_create_ssa_default_def (cfun, var);
3079 /* To satisfy the condition for
3080 cgraph_update_edges_for_call_stmt_node,
3081 we need to preserve the GIMPLE_CALL statement
3082 at the position of the GSI iterator. */
3083 update_call_from_tree (gsi, def);
3084 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3086 else
3088 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3089 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3090 gsi_replace (gsi, new_stmt, false);
3092 return true;
3098 /* Check for indirect calls that became direct calls, and then
3099 no longer require a static chain. */
3100 if (gimple_call_chain (stmt))
3102 tree fn = gimple_call_fndecl (stmt);
3103 if (fn && !DECL_STATIC_CHAIN (fn))
3105 gimple_call_set_chain (stmt, NULL);
3106 changed = true;
3108 else
3110 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3111 if (tmp)
3113 gimple_call_set_chain (stmt, tmp);
3114 changed = true;
3119 if (inplace)
3120 return changed;
3122 /* Check for builtins that CCP can handle using information not
3123 available in the generic fold routines. */
3124 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3126 if (gimple_fold_builtin (gsi))
3127 changed = true;
3129 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3131 changed |= targetm.gimple_fold_builtin (gsi);
3133 else if (gimple_call_internal_p (stmt))
3135 enum tree_code subcode = ERROR_MARK;
3136 tree result = NULL_TREE;
3137 bool cplx_result = false;
3138 tree overflow = NULL_TREE;
3139 switch (gimple_call_internal_fn (stmt))
3141 case IFN_BUILTIN_EXPECT:
3142 result = fold_builtin_expect (gimple_location (stmt),
3143 gimple_call_arg (stmt, 0),
3144 gimple_call_arg (stmt, 1),
3145 gimple_call_arg (stmt, 2));
3146 break;
3147 case IFN_UBSAN_OBJECT_SIZE:
3148 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3149 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3150 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3151 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3152 gimple_call_arg (stmt, 2))))
3154 gsi_replace (gsi, gimple_build_nop (), true);
3155 unlink_stmt_vdef (stmt);
3156 release_defs (stmt);
3157 return true;
3159 break;
3160 case IFN_UBSAN_CHECK_ADD:
3161 subcode = PLUS_EXPR;
3162 break;
3163 case IFN_UBSAN_CHECK_SUB:
3164 subcode = MINUS_EXPR;
3165 break;
3166 case IFN_UBSAN_CHECK_MUL:
3167 subcode = MULT_EXPR;
3168 break;
3169 case IFN_ADD_OVERFLOW:
3170 subcode = PLUS_EXPR;
3171 cplx_result = true;
3172 break;
3173 case IFN_SUB_OVERFLOW:
3174 subcode = MINUS_EXPR;
3175 cplx_result = true;
3176 break;
3177 case IFN_MUL_OVERFLOW:
3178 subcode = MULT_EXPR;
3179 cplx_result = true;
3180 break;
3181 default:
3182 break;
3184 if (subcode != ERROR_MARK)
3186 tree arg0 = gimple_call_arg (stmt, 0);
3187 tree arg1 = gimple_call_arg (stmt, 1);
3188 tree type = TREE_TYPE (arg0);
3189 if (cplx_result)
3191 tree lhs = gimple_call_lhs (stmt);
3192 if (lhs == NULL_TREE)
3193 type = NULL_TREE;
3194 else
3195 type = TREE_TYPE (TREE_TYPE (lhs));
3197 if (type == NULL_TREE)
3199 /* x = y + 0; x = y - 0; x = y * 0; */
3200 else if (integer_zerop (arg1))
3201 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3202 /* x = 0 + y; x = 0 * y; */
3203 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3204 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3205 /* x = y - y; */
3206 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3207 result = integer_zero_node;
3208 /* x = y * 1; x = 1 * y; */
3209 else if (subcode == MULT_EXPR && integer_onep (arg1))
3210 result = arg0;
3211 else if (subcode == MULT_EXPR && integer_onep (arg0))
3212 result = arg1;
3213 else if (TREE_CODE (arg0) == INTEGER_CST
3214 && TREE_CODE (arg1) == INTEGER_CST)
3216 if (cplx_result)
3217 result = int_const_binop (subcode, fold_convert (type, arg0),
3218 fold_convert (type, arg1));
3219 else
3220 result = int_const_binop (subcode, arg0, arg1);
3221 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3223 if (cplx_result)
3224 overflow = build_one_cst (type);
3225 else
3226 result = NULL_TREE;
3229 if (result)
3231 if (result == integer_zero_node)
3232 result = build_zero_cst (type);
3233 else if (cplx_result && TREE_TYPE (result) != type)
3235 if (TREE_CODE (result) == INTEGER_CST)
3237 if (arith_overflowed_p (PLUS_EXPR, type, result,
3238 integer_zero_node))
3239 overflow = build_one_cst (type);
3241 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3242 && TYPE_UNSIGNED (type))
3243 || (TYPE_PRECISION (type)
3244 < (TYPE_PRECISION (TREE_TYPE (result))
3245 + (TYPE_UNSIGNED (TREE_TYPE (result))
3246 && !TYPE_UNSIGNED (type)))))
3247 result = NULL_TREE;
3248 if (result)
3249 result = fold_convert (type, result);
3254 if (result)
3256 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3257 result = drop_tree_overflow (result);
3258 if (cplx_result)
3260 if (overflow == NULL_TREE)
3261 overflow = build_zero_cst (TREE_TYPE (result));
3262 tree ctype = build_complex_type (TREE_TYPE (result));
3263 if (TREE_CODE (result) == INTEGER_CST
3264 && TREE_CODE (overflow) == INTEGER_CST)
3265 result = build_complex (ctype, result, overflow);
3266 else
3267 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3268 ctype, result, overflow);
3270 if (!update_call_from_tree (gsi, result))
3271 gimplify_and_update_call_from_tree (gsi, result);
3272 changed = true;
3276 return changed;
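/* A hypothetical sketch of the internal-function folding above: with
   an 8-bit signed element type, IFN_ADD_OVERFLOW (100, 27) folds to
   the complex constant __complex__ (127, 0), while
   IFN_ADD_OVERFLOW (100, 28) folds to __complex__ (-128, 1), i.e. the
   wrapped sum paired with an overflow flag of one.  */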
3280 /* Worker for fold_stmt_1 dispatch to pattern based folding with
3281 gimple_simplify.
3283 Replaces *GSI with the simplification result in RCODE and OPS
3284 and the associated statements in *SEQ. Does the replacement
3285 according to INPLACE and returns true if the operation succeeded. */
3287 static bool
3288 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3289 code_helper rcode, tree *ops,
3290 gimple_seq *seq, bool inplace)
3292 gimple stmt = gsi_stmt (*gsi);
3294 /* Play safe and do not allow abnormals to be mentioned in
3295 newly created statements. See also maybe_push_res_to_seq. */
3296 if ((TREE_CODE (ops[0]) == SSA_NAME
3297 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
3298 || (ops[1]
3299 && TREE_CODE (ops[1]) == SSA_NAME
3300 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
3301 || (ops[2]
3302 && TREE_CODE (ops[2]) == SSA_NAME
3303 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
3304 return false;
3306 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
3308 gcc_assert (rcode.is_tree_code ());
3309 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3310 /* GIMPLE_CONDs condition may not throw. */
3311 && (!flag_exceptions
3312 || !cfun->can_throw_non_call_exceptions
3313 || !operation_could_trap_p (rcode,
3314 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3315 false, NULL_TREE)))
3316 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
3317 else if (rcode == SSA_NAME)
3318 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
3319 build_zero_cst (TREE_TYPE (ops[0])));
3320 else if (rcode == INTEGER_CST)
3322 if (integer_zerop (ops[0]))
3323 gimple_cond_make_false (cond_stmt);
3324 else
3325 gimple_cond_make_true (cond_stmt);
3327 else if (!inplace)
3329 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3330 ops, seq);
3331 if (!res)
3332 return false;
3333 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
3334 build_zero_cst (TREE_TYPE (res)));
3336 else
3337 return false;
3338 if (dump_file && (dump_flags & TDF_DETAILS))
3340 fprintf (dump_file, "gimple_simplified to ");
3341 if (!gimple_seq_empty_p (*seq))
3342 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3343 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3344 0, TDF_SLIM);
3346 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3347 return true;
3349 else if (is_gimple_assign (stmt)
3350 && rcode.is_tree_code ())
3352 if (!inplace
3353 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
3355 maybe_build_generic_op (rcode,
3356 TREE_TYPE (gimple_assign_lhs (stmt)),
3357 &ops[0], ops[1], ops[2]);
3358 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
3359 if (dump_file && (dump_flags & TDF_DETAILS))
3361 fprintf (dump_file, "gimple_simplified to ");
3362 if (!gimple_seq_empty_p (*seq))
3363 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3364 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3365 0, TDF_SLIM);
3367 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3368 return true;
3371 else if (!inplace)
3373 if (gimple_has_lhs (stmt))
3375 tree lhs = gimple_get_lhs (stmt);
3376 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3377 ops, seq, lhs))
3378 return false;
3379 if (dump_file && (dump_flags & TDF_DETAILS))
3381 fprintf (dump_file, "gimple_simplified to ");
3382 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3384 gsi_replace_with_seq_vops (gsi, *seq);
3385 return true;
3387 else
3388 gcc_unreachable ();
3391 return false;
3394 /* Canonicalize MEM_REFs invariant address operand after propagation. */
3396 static bool
3397 maybe_canonicalize_mem_ref_addr (tree *t)
3399 bool res = false;
3401 if (TREE_CODE (*t) == ADDR_EXPR)
3402 t = &TREE_OPERAND (*t, 0);
3404 while (handled_component_p (*t))
3405 t = &TREE_OPERAND (*t, 0);
3407 /* Canonicalize MEM [&foo.bar, 0], which appears after invariant
3408 addresses have been propagated into an SSA name MEM_REF address. */
3409 if (TREE_CODE (*t) == MEM_REF
3410 || TREE_CODE (*t) == TARGET_MEM_REF)
3412 tree addr = TREE_OPERAND (*t, 0);
3413 if (TREE_CODE (addr) == ADDR_EXPR
3414 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3415 || handled_component_p (TREE_OPERAND (addr, 0))))
3417 tree base;
3418 HOST_WIDE_INT coffset;
3419 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3420 &coffset);
3421 if (!base)
3422 gcc_unreachable ();
3424 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3425 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3426 TREE_OPERAND (*t, 1),
3427 size_int (coffset));
3428 res = true;
3430 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3431 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3434 /* Canonicalize back MEM_REFs to plain reference trees if the object
3435 accessed is a decl that has the same access semantics as the MEM_REF. */
3436 if (TREE_CODE (*t) == MEM_REF
3437 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
3438 && integer_zerop (TREE_OPERAND (*t, 1))
3439 && MR_DEPENDENCE_CLIQUE (*t) == 0)
3441 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3442 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3443 if (/* Same volatile qualification. */
3444 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3445 /* Same TBAA behavior with -fstrict-aliasing. */
3446 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3447 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3448 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3449 /* Same alignment. */
3450 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3451 /* We have to be careful here not to drop a required conversion
3452 from the rhs to the lhs if *t appears on the lhs, or vice-versa
3453 if it appears on the rhs. Thus require strict type
3454 compatibility. */
3455 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3457 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3458 res = true;
3462 /* Canonicalize TARGET_MEM_REF in particular with respect to
3463 the indexes becoming constant. */
3464 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3466 tree tem = maybe_fold_tmr (*t);
3467 if (tem)
3469 *t = tem;
3470 res = true;
3474 return res;
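/* Hypothetical sketches of the canonicalizations above: if &a.b is at
   constant offset 8 within A, then MEM[&a.b, 4] becomes MEM[&a, 12];
   and MEM[&x, 0] degrades back to the plain decl X when the access
   type, volatility and alignment all match.  */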
3477 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3478 distinguishes both cases. */
3480 static bool
3481 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3483 bool changed = false;
3484 gimple stmt = gsi_stmt (*gsi);
3485 unsigned i;
3487 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3488 after propagation.
3489 ??? This shouldn't be done in generic folding but in the
3490 propagation helpers which also know whether an address was
3491 propagated. */
3492 switch (gimple_code (stmt))
3494 case GIMPLE_ASSIGN:
3495 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3497 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3498 if ((REFERENCE_CLASS_P (*rhs)
3499 || TREE_CODE (*rhs) == ADDR_EXPR)
3500 && maybe_canonicalize_mem_ref_addr (rhs))
3501 changed = true;
3502 tree *lhs = gimple_assign_lhs_ptr (stmt);
3503 if (REFERENCE_CLASS_P (*lhs)
3504 && maybe_canonicalize_mem_ref_addr (lhs))
3505 changed = true;
3507 break;
3508 case GIMPLE_CALL:
3510 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3512 tree *arg = gimple_call_arg_ptr (stmt, i);
3513 if (REFERENCE_CLASS_P (*arg)
3514 && maybe_canonicalize_mem_ref_addr (arg))
3515 changed = true;
3517 tree *lhs = gimple_call_lhs_ptr (stmt);
3518 if (*lhs
3519 && REFERENCE_CLASS_P (*lhs)
3520 && maybe_canonicalize_mem_ref_addr (lhs))
3521 changed = true;
3522 break;
3524 case GIMPLE_ASM:
3526 gasm *asm_stmt = as_a <gasm *> (stmt);
3527 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3529 tree link = gimple_asm_output_op (asm_stmt, i);
3530 tree op = TREE_VALUE (link);
3531 if (REFERENCE_CLASS_P (op)
3532 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3533 changed = true;
3535 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3537 tree link = gimple_asm_input_op (asm_stmt, i);
3538 tree op = TREE_VALUE (link);
3539 if ((REFERENCE_CLASS_P (op)
3540 || TREE_CODE (op) == ADDR_EXPR)
3541 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3542 changed = true;
3545 break;
3546 case GIMPLE_DEBUG:
3547 if (gimple_debug_bind_p (stmt))
3549 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3550 if (*val
3551 && (REFERENCE_CLASS_P (*val)
3552 || TREE_CODE (*val) == ADDR_EXPR)
3553 && maybe_canonicalize_mem_ref_addr (val))
3554 changed = true;
3556 break;
3557 default:;
3560 /* Dispatch to pattern-based folding. */
3561 if (!inplace
3562 || is_gimple_assign (stmt)
3563 || gimple_code (stmt) == GIMPLE_COND)
3565 gimple_seq seq = NULL;
3566 code_helper rcode;
3567 tree ops[3] = {};
3568 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq, valueize))
3570 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3571 changed = true;
3572 else
3573 gimple_seq_discard (seq);
3577 stmt = gsi_stmt (*gsi);
3579 /* Fold the main computation performed by the statement. */
3580 switch (gimple_code (stmt))
3582 case GIMPLE_ASSIGN:
3584 unsigned old_num_ops = gimple_num_ops (stmt);
3585 enum tree_code subcode = gimple_assign_rhs_code (stmt);
3586 tree lhs = gimple_assign_lhs (stmt);
3587 tree new_rhs;
3588 /* First canonicalize operand order. This avoids building new
3589 trees if this is the only thing fold would later do. */
3590 if ((commutative_tree_code (subcode)
3591 || commutative_ternary_tree_code (subcode))
3592 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
3593 gimple_assign_rhs2 (stmt), false))
3595 tree tem = gimple_assign_rhs1 (stmt);
3596 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
3597 gimple_assign_set_rhs2 (stmt, tem);
3598 changed = true;
3600 new_rhs = fold_gimple_assign (gsi);
3601 if (new_rhs
3602 && !useless_type_conversion_p (TREE_TYPE (lhs),
3603 TREE_TYPE (new_rhs)))
3604 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3605 if (new_rhs
3606 && (!inplace
3607 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3609 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3610 changed = true;
3612 break;
3615 case GIMPLE_COND:
3616 changed |= fold_gimple_cond (as_a <gcond *> (stmt));
3617 break;
3619 case GIMPLE_CALL:
3620 changed |= gimple_fold_call (gsi, inplace);
3621 break;
3623 case GIMPLE_ASM:
3624 /* Fold *& in asm operands. */
3626 gasm *asm_stmt = as_a <gasm *> (stmt);
3627 size_t noutputs;
3628 const char **oconstraints;
3629 const char *constraint;
3630 bool allows_mem, allows_reg;
3632 noutputs = gimple_asm_noutputs (asm_stmt);
3633 oconstraints = XALLOCAVEC (const char *, noutputs);
3635 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3637 tree link = gimple_asm_output_op (asm_stmt, i);
3638 tree op = TREE_VALUE (link);
3639 oconstraints[i]
3640 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3641 if (REFERENCE_CLASS_P (op)
3642 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3644 TREE_VALUE (link) = op;
3645 changed = true;
3648 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3650 tree link = gimple_asm_input_op (asm_stmt, i);
3651 tree op = TREE_VALUE (link);
3652 constraint
3653 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3654 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3655 oconstraints, &allows_mem, &allows_reg);
3656 if (REFERENCE_CLASS_P (op)
3657 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3658 != NULL_TREE)
3660 TREE_VALUE (link) = op;
3661 changed = true;
3665 break;
3667 case GIMPLE_DEBUG:
3668 if (gimple_debug_bind_p (stmt))
3670 tree val = gimple_debug_bind_get_value (stmt);
3671 if (val
3672 && REFERENCE_CLASS_P (val))
3674 tree tem = maybe_fold_reference (val, false);
3675 if (tem)
3677 gimple_debug_bind_set_value (stmt, tem);
3678 changed = true;
3681 else if (val
3682 && TREE_CODE (val) == ADDR_EXPR)
3684 tree ref = TREE_OPERAND (val, 0);
3685 tree tem = maybe_fold_reference (ref, false);
3686 if (tem)
3688 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3689 gimple_debug_bind_set_value (stmt, tem);
3690 changed = true;
3694 break;
3696 default:;
3699 stmt = gsi_stmt (*gsi);
3701 /* Fold *& on the lhs. */
3702 if (gimple_has_lhs (stmt))
3704 tree lhs = gimple_get_lhs (stmt);
3705 if (lhs && REFERENCE_CLASS_P (lhs))
3707 tree new_lhs = maybe_fold_reference (lhs, true);
3708 if (new_lhs)
3710 gimple_set_lhs (stmt, new_lhs);
3711 changed = true;
3716 return changed;
3719 /* Valueization callback that ends up not following SSA edges. */
3721 tree
3722 no_follow_ssa_edges (tree)
3724 return NULL_TREE;
3727 /* Valueization callback that ends up following single-use SSA edges only. */
3729 tree
3730 follow_single_use_edges (tree val)
3732 if (TREE_CODE (val) == SSA_NAME
3733 && !has_single_use (val))
3734 return NULL_TREE;
3735 return val;
3738 /* Fold the statement pointed to by GSI. In some cases, this function may
3739 replace the whole statement with a new one. Returns true iff folding
3740 makes any changes.
3741 The statement pointed to by GSI should be in valid gimple form but may
3742 be in unfolded state as resulting from for example constant propagation
3743 which can produce *&x = 0. */
3745 bool
3746 fold_stmt (gimple_stmt_iterator *gsi)
3748 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
3751 bool
3752 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
3754 return fold_stmt_1 (gsi, false, valueize);
3757 /* Perform the minimal folding on statement *GSI. Only operations like
3758 *&x created by constant propagation are handled. The statement cannot
3759 be replaced with a new one. Return true if the statement was
3760 changed, false otherwise.
3761 The statement *GSI should be in valid gimple form but may
3762 be in unfolded state as resulting from for example constant propagation
3763 which can produce *&x = 0. */
3765 bool
3766 fold_stmt_inplace (gimple_stmt_iterator *gsi)
3768 gimple stmt = gsi_stmt (*gsi);
3769 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
3770 gcc_assert (gsi_stmt (*gsi) == stmt);
3771 return changed;
3774 /* Canonicalize the boolean EXPR, inverting it if INVERT is true;
3775 return NULL_TREE if EXPR is null or we don't know how to canonicalize it.
3776 If non-null, the result always has boolean type. */
3778 static tree
3779 canonicalize_bool (tree expr, bool invert)
3781 if (!expr)
3782 return NULL_TREE;
3783 else if (invert)
3785 if (integer_nonzerop (expr))
3786 return boolean_false_node;
3787 else if (integer_zerop (expr))
3788 return boolean_true_node;
3789 else if (TREE_CODE (expr) == SSA_NAME)
3790 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3791 build_int_cst (TREE_TYPE (expr), 0));
3792 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3793 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3794 boolean_type_node,
3795 TREE_OPERAND (expr, 0),
3796 TREE_OPERAND (expr, 1));
3797 else
3798 return NULL_TREE;
3800 else
3802 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3803 return expr;
3804 if (integer_nonzerop (expr))
3805 return boolean_true_node;
3806 else if (integer_zerop (expr))
3807 return boolean_false_node;
3808 else if (TREE_CODE (expr) == SSA_NAME)
3809 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3810 build_int_cst (TREE_TYPE (expr), 0));
3811 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3812 return fold_build2 (TREE_CODE (expr),
3813 boolean_type_node,
3814 TREE_OPERAND (expr, 0),
3815 TREE_OPERAND (expr, 1));
3816 else
3817 return NULL_TREE;
3821 /* Check to see if a boolean expression EXPR is logically equivalent to the
3822 comparison (OP1 CODE OP2). Check for various identities involving
3823 SSA_NAMEs. */
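/* For instance, given the definition _1 = (a < b), the boolean _1 is
   recognized as equivalent to the comparison (a < b) itself, as well
   as to (_1 != 0) and (_1 == 1).  */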
3825 static bool
3826 same_bool_comparison_p (const_tree expr, enum tree_code code,
3827 const_tree op1, const_tree op2)
3829 gimple s;
3831 /* The obvious case. */
3832 if (TREE_CODE (expr) == code
3833 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
3834 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
3835 return true;
3837 /* Check for EXPR matching the (NAME != 0) / (NAME == 1) forms, and the
3838 case where EXPR is an SSA_NAME with a definition matching the comparison. */
3839 if (TREE_CODE (expr) == SSA_NAME
3840 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3842 if (operand_equal_p (expr, op1, 0))
3843 return ((code == NE_EXPR && integer_zerop (op2))
3844 || (code == EQ_EXPR && integer_nonzerop (op2)));
3845 s = SSA_NAME_DEF_STMT (expr);
3846 if (is_gimple_assign (s)
3847 && gimple_assign_rhs_code (s) == code
3848 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
3849 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
3850 return true;
3853 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3854 of name is a comparison, recurse. */
3855 if (TREE_CODE (op1) == SSA_NAME
3856 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
3858 s = SSA_NAME_DEF_STMT (op1);
3859 if (is_gimple_assign (s)
3860 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
3862 enum tree_code c = gimple_assign_rhs_code (s);
3863 if ((c == NE_EXPR && integer_zerop (op2))
3864 || (c == EQ_EXPR && integer_nonzerop (op2)))
3865 return same_bool_comparison_p (expr, c,
3866 gimple_assign_rhs1 (s),
3867 gimple_assign_rhs2 (s));
3868 if ((c == EQ_EXPR && integer_zerop (op2))
3869 || (c == NE_EXPR && integer_nonzerop (op2)))
3870 return same_bool_comparison_p (expr,
3871 invert_tree_comparison (c, false),
3872 gimple_assign_rhs1 (s),
3873 gimple_assign_rhs2 (s));
3876 return false;
3879 /* Check to see if two boolean expressions OP1 and OP2 are logically
3880 equivalent. */
3882 static bool
3883 same_bool_result_p (const_tree op1, const_tree op2)
3885 /* Simple cases first. */
3886 if (operand_equal_p (op1, op2, 0))
3887 return true;
3889 /* Check the cases where at least one of the operands is a comparison.
3890 These are a bit smarter than operand_equal_p in that they apply some
3891 identities on SSA_NAMEs. */
3892 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
3893 && same_bool_comparison_p (op1, TREE_CODE (op2),
3894 TREE_OPERAND (op2, 0),
3895 TREE_OPERAND (op2, 1)))
3896 return true;
3897 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
3898 && same_bool_comparison_p (op2, TREE_CODE (op1),
3899 TREE_OPERAND (op1, 0),
3900 TREE_OPERAND (op1, 1)))
3901 return true;
3903 /* Default case. */
3904 return false;
3907 /* Forward declarations for some mutually recursive functions. */
3909 static tree
3910 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3911 enum tree_code code2, tree op2a, tree op2b);
3912 static tree
3913 and_var_with_comparison (tree var, bool invert,
3914 enum tree_code code2, tree op2a, tree op2b);
3915 static tree
3916 and_var_with_comparison_1 (gimple stmt,
3917 enum tree_code code2, tree op2a, tree op2b);
3918 static tree
3919 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3920 enum tree_code code2, tree op2a, tree op2b);
3921 static tree
3922 or_var_with_comparison (tree var, bool invert,
3923 enum tree_code code2, tree op2a, tree op2b);
3924 static tree
3925 or_var_with_comparison_1 (gimple stmt,
3926 enum tree_code code2, tree op2a, tree op2b);
3928 /* Helper function for and_comparisons_1: try to simplify the AND of the
3929 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3930 If INVERT is true, invert the value of the VAR before doing the AND.
3931 Return NULL_TREE if we can't simplify this to a single expression. */
3933 static tree
3934 and_var_with_comparison (tree var, bool invert,
3935 enum tree_code code2, tree op2a, tree op2b)
3937 tree t;
3938 gimple stmt = SSA_NAME_DEF_STMT (var);
3940 /* We can only deal with variables whose definitions are assignments. */
3941 if (!is_gimple_assign (stmt))
3942 return NULL_TREE;
3944 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3945 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3946 Then we only have to consider the simpler non-inverted cases. */
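/* E.g. with INVERT set, !var AND (a < b) is handled as
   !(var OR (a >= b)): the OR helper is called with the inverted
   comparison and canonicalize_bool applies the outer negation to
   the result.  */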
3947 if (invert)
3948 t = or_var_with_comparison_1 (stmt,
3949 invert_tree_comparison (code2, false),
3950 op2a, op2b);
3951 else
3952 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
3953 return canonicalize_bool (t, invert);
3956 /* Try to simplify the AND of the ssa variable defined by the assignment
3957 STMT with the comparison specified by (OP2A CODE2 OP2B).
3958 Return NULL_TREE if we can't simplify this to a single expression. */
3960 static tree
3961 and_var_with_comparison_1 (gimple stmt,
3962 enum tree_code code2, tree op2a, tree op2b)
3964 tree var = gimple_assign_lhs (stmt);
3965 tree true_test_var = NULL_TREE;
3966 tree false_test_var = NULL_TREE;
3967 enum tree_code innercode = gimple_assign_rhs_code (stmt);
3969 /* Check for identities like (var AND (var == 0)) => false. */
3970 if (TREE_CODE (op2a) == SSA_NAME
3971 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
3973 if ((code2 == NE_EXPR && integer_zerop (op2b))
3974 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
3976 true_test_var = op2a;
3977 if (var == true_test_var)
3978 return var;
3980 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
3981 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
3983 false_test_var = op2a;
3984 if (var == false_test_var)
3985 return boolean_false_node;
3989 /* If the definition is a comparison, recurse on it. */
3990 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
3992 tree t = and_comparisons_1 (innercode,
3993 gimple_assign_rhs1 (stmt),
3994 gimple_assign_rhs2 (stmt),
3995 code2,
3996 op2a,
3997 op2b);
3998 if (t)
3999 return t;
4002 /* If the definition is an AND or OR expression, we may be able to
4003 simplify by reassociating. */
4004 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4005 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4007 tree inner1 = gimple_assign_rhs1 (stmt);
4008 tree inner2 = gimple_assign_rhs2 (stmt);
4009 gimple s;
4010 tree t;
4011 tree partial = NULL_TREE;
4012 bool is_and = (innercode == BIT_AND_EXPR);
4014 /* Check for boolean identities that don't require recursive examination
4015 of inner1/inner2:
4016 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4017 inner1 AND (inner1 OR inner2) => inner1
4018 !inner1 AND (inner1 AND inner2) => false
4019 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4020 Likewise for similar cases involving inner2. */
4021 if (inner1 == true_test_var)
4022 return (is_and ? var : inner1);
4023 else if (inner2 == true_test_var)
4024 return (is_and ? var : inner2);
4025 else if (inner1 == false_test_var)
4026 return (is_and
4027 ? boolean_false_node
4028 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4029 else if (inner2 == false_test_var)
4030 return (is_and
4031 ? boolean_false_node
4032 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4034 /* Next, redistribute/reassociate the AND across the inner tests.
4035 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4036 if (TREE_CODE (inner1) == SSA_NAME
4037 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4038 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4039 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4040 gimple_assign_rhs1 (s),
4041 gimple_assign_rhs2 (s),
4042 code2, op2a, op2b)))
4044 /* Handle the AND case, where we are reassociating:
4045 (inner1 AND inner2) AND (op2a code2 op2b)
4046 => (t AND inner2)
4047 If the partial result t is a constant, we win. Otherwise
4048 continue on to try reassociating with the other inner test. */
4049 if (is_and)
4051 if (integer_onep (t))
4052 return inner2;
4053 else if (integer_zerop (t))
4054 return boolean_false_node;
4057 /* Handle the OR case, where we are redistributing:
4058 (inner1 OR inner2) AND (op2a code2 op2b)
4059 => (t OR (inner2 AND (op2a code2 op2b))) */
4060 else if (integer_onep (t))
4061 return boolean_true_node;
4063 /* Save partial result for later. */
4064 partial = t;
4067 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4068 if (TREE_CODE (inner2) == SSA_NAME
4069 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4070 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4071 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4072 gimple_assign_rhs1 (s),
4073 gimple_assign_rhs2 (s),
4074 code2, op2a, op2b)))
4076 /* Handle the AND case, where we are reassociating:
4077 (inner1 AND inner2) AND (op2a code2 op2b)
4078 => (inner1 AND t) */
4079 if (is_and)
4081 if (integer_onep (t))
4082 return inner1;
4083 else if (integer_zerop (t))
4084 return boolean_false_node;
4085 /* If both are the same, we can apply the identity
4086 (x AND x) == x. */
4087 else if (partial && same_bool_result_p (t, partial))
4088 return t;
4091 /* Handle the OR case, where we are redistributing:
4092 (inner1 OR inner2) AND (op2a code2 op2b)
4093 => (t OR (inner1 AND (op2a code2 op2b)))
4094 => (t OR partial) */
4095 else
4097 if (integer_onep (t))
4098 return boolean_true_node;
4099 else if (partial)
4101 /* We already got a simplification for the other
4102 operand to the redistributed OR expression. The
4103 interesting case is when at least one is false.
4104 Or, if both are the same, we can apply the identity
4105 (x OR x) == x. */
4106 if (integer_zerop (partial))
4107 return t;
4108 else if (integer_zerop (t))
4109 return partial;
4110 else if (same_bool_result_p (t, partial))
4111 return t;
4116 return NULL_TREE;
4119 /* Try to simplify the AND of two comparisons defined by
4120 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4121 If this can be done without constructing an intermediate value,
4122 return the resulting tree; otherwise NULL_TREE is returned.
4123 This function is deliberately asymmetric as it recurses on SSA_DEFs
4124 in the first comparison but not the second. */
4126 static tree
4127 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4128 enum tree_code code2, tree op2a, tree op2b)
4130 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4132 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4133 if (operand_equal_p (op1a, op2a, 0)
4134 && operand_equal_p (op1b, op2b, 0))
4136 /* Result will be either NULL_TREE, or a combined comparison. */
4137 tree t = combine_comparisons (UNKNOWN_LOCATION,
4138 TRUTH_ANDIF_EXPR, code1, code2,
4139 truth_type, op1a, op1b);
4140 if (t)
4141 return t;
4144 /* Likewise the swapped case of the above. */
4145 if (operand_equal_p (op1a, op2b, 0)
4146 && operand_equal_p (op1b, op2a, 0))
4148 /* Result will be either NULL_TREE, or a combined comparison. */
4149 tree t = combine_comparisons (UNKNOWN_LOCATION,
4150 TRUTH_ANDIF_EXPR, code1,
4151 swap_tree_comparison (code2),
4152 truth_type, op1a, op1b);
4153 if (t)
4154 return t;
4157 /* If both comparisons are of the same value against constants, we might
4158 be able to merge them. */
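/* E.g. (x == 3) AND (x < 5) folds to (x == 3); (x <= 3) AND (x >= 3)
   folds to the singleton range test (x == 3); and (x < 3) AND (x >= 5)
   folds to false because the two ranges are disjoint.  */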
4159 if (operand_equal_p (op1a, op2a, 0)
4160 && TREE_CODE (op1b) == INTEGER_CST
4161 && TREE_CODE (op2b) == INTEGER_CST)
4163 int cmp = tree_int_cst_compare (op1b, op2b);
4165 /* If we have (op1a == op1b), we should either be able to
4166 return that or FALSE, depending on whether the constant op1b
4167 also satisfies the other comparison against op2b. */
4168 if (code1 == EQ_EXPR)
4170 bool done = true;
4171 bool val;
4172 switch (code2)
4174 case EQ_EXPR: val = (cmp == 0); break;
4175 case NE_EXPR: val = (cmp != 0); break;
4176 case LT_EXPR: val = (cmp < 0); break;
4177 case GT_EXPR: val = (cmp > 0); break;
4178 case LE_EXPR: val = (cmp <= 0); break;
4179 case GE_EXPR: val = (cmp >= 0); break;
4180 default: done = false;
4182 if (done)
4184 if (val)
4185 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4186 else
4187 return boolean_false_node;
4190 /* Likewise if the second comparison is an == comparison. */
4191 else if (code2 == EQ_EXPR)
4193 bool done = true;
4194 bool val;
4195 switch (code1)
4197 case EQ_EXPR: val = (cmp == 0); break;
4198 case NE_EXPR: val = (cmp != 0); break;
4199 case LT_EXPR: val = (cmp > 0); break;
4200 case GT_EXPR: val = (cmp < 0); break;
4201 case LE_EXPR: val = (cmp >= 0); break;
4202 case GE_EXPR: val = (cmp <= 0); break;
4203 default: done = false;
4205 if (done)
4207 if (val)
4208 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4209 else
4210 return boolean_false_node;
4214 /* Same business with inequality tests. */
4215 else if (code1 == NE_EXPR)
4217 bool val;
4218 switch (code2)
4220 case EQ_EXPR: val = (cmp != 0); break;
4221 case NE_EXPR: val = (cmp == 0); break;
4222 case LT_EXPR: val = (cmp >= 0); break;
4223 case GT_EXPR: val = (cmp <= 0); break;
4224 case LE_EXPR: val = (cmp > 0); break;
4225 case GE_EXPR: val = (cmp < 0); break;
4226 default:
4227 val = false;
4229 if (val)
4230 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4232 else if (code2 == NE_EXPR)
4234 bool val;
4235 switch (code1)
4237 case EQ_EXPR: val = (cmp == 0); break;
4238 case NE_EXPR: val = (cmp != 0); break;
4239 case LT_EXPR: val = (cmp <= 0); break;
4240 case GT_EXPR: val = (cmp >= 0); break;
4241 case LE_EXPR: val = (cmp < 0); break;
4242 case GE_EXPR: val = (cmp > 0); break;
4243 default:
4244 val = false;
4246 if (val)
4247 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4250 /* Choose the more restrictive of two < or <= comparisons. */
4251 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4252 && (code2 == LT_EXPR || code2 == LE_EXPR))
4254 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4255 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4256 else
4257 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4260 /* Likewise choose the more restrictive of two > or >= comparisons. */
4261 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4262 && (code2 == GT_EXPR || code2 == GE_EXPR))
4264 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4265 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4266 else
4267 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4270 /* Check for singleton ranges. */
4271 else if (cmp == 0
4272 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4273 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4274 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4276 /* Check for disjoint ranges. */
4277 else if (cmp <= 0
4278 && (code1 == LT_EXPR || code1 == LE_EXPR)
4279 && (code2 == GT_EXPR || code2 == GE_EXPR))
4280 return boolean_false_node;
4281 else if (cmp >= 0
4282 && (code1 == GT_EXPR || code1 == GE_EXPR)
4283 && (code2 == LT_EXPR || code2 == LE_EXPR))
4284 return boolean_false_node;
4287 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4288 NAME's definition is a truth value. See if there are any simplifications
4289 that can be done against the NAME's definition. */
4290 if (TREE_CODE (op1a) == SSA_NAME
4291 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4292 && (integer_zerop (op1b) || integer_onep (op1b)))
4294 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4295 || (code1 == NE_EXPR && integer_onep (op1b)));
4296 gimple stmt = SSA_NAME_DEF_STMT (op1a);
4297 switch (gimple_code (stmt))
4299 case GIMPLE_ASSIGN:
4300 /* Try to simplify by copy-propagating the definition. */
4301 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4303 case GIMPLE_PHI:
4304 /* If every argument to the PHI produces the same result when
4305 ANDed with the second comparison, we win.
4306 Do not do this unless the type is bool since we need a bool
4307 result here anyway. */
4308 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4310 tree result = NULL_TREE;
4311 unsigned i;
4312 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4314 tree arg = gimple_phi_arg_def (stmt, i);
4316 /* If this PHI has itself as an argument, ignore it.
4317 If all the other args produce the same result,
4318 we're still OK. */
4319 if (arg == gimple_phi_result (stmt))
4320 continue;
4321 else if (TREE_CODE (arg) == INTEGER_CST)
4323 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4325 if (!result)
4326 result = boolean_false_node;
4327 else if (!integer_zerop (result))
4328 return NULL_TREE;
4330 else if (!result)
4331 result = fold_build2 (code2, boolean_type_node,
4332 op2a, op2b);
4333 else if (!same_bool_comparison_p (result,
4334 code2, op2a, op2b))
4335 return NULL_TREE;
4337 else if (TREE_CODE (arg) == SSA_NAME
4338 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4340 tree temp;
4341 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
4342 /* In simple cases we can look through PHI nodes,
4343 but we have to be careful with loops.
4344 See PR49073. */
4345 if (! dom_info_available_p (CDI_DOMINATORS)
4346 || gimple_bb (def_stmt) == gimple_bb (stmt)
4347 || dominated_by_p (CDI_DOMINATORS,
4348 gimple_bb (def_stmt),
4349 gimple_bb (stmt)))
4350 return NULL_TREE;
4351 temp = and_var_with_comparison (arg, invert, code2,
4352 op2a, op2b);
4353 if (!temp)
4354 return NULL_TREE;
4355 else if (!result)
4356 result = temp;
4357 else if (!same_bool_result_p (result, temp))
4358 return NULL_TREE;
4360 else
4361 return NULL_TREE;
4363 return result;
4366 default:
4367 break;
4370 return NULL_TREE;
4373 /* Try to simplify the AND of two comparisons, specified by
4374 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4375 If this can be simplified to a single expression (without requiring
4376 introducing more SSA variables to hold intermediate values),
4377 return the resulting tree. Otherwise return NULL_TREE.
4378 If the result expression is non-null, it has boolean type. */
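/* Folding is attempted in both operand orders because and_comparisons_1
   follows SSA definitions only in its first comparison; since AND is
   commutative, either order yields an equivalent result.  */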
4380 tree
4381 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4382 enum tree_code code2, tree op2a, tree op2b)
4384 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4385 if (t)
4386 return t;
4387 else
4388 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4391 /* Helper function for or_comparisons_1: try to simplify the OR of the
4392 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4393 If INVERT is true, invert the value of VAR before doing the OR.
4394 Return NULL_TREE if we can't simplify this to a single expression. */
4396 static tree
4397 or_var_with_comparison (tree var, bool invert,
4398 enum tree_code code2, tree op2a, tree op2b)
4400 tree t;
4401 gimple stmt = SSA_NAME_DEF_STMT (var);
4403 /* We can only deal with variables whose definitions are assignments. */
4404 if (!is_gimple_assign (stmt))
4405 return NULL_TREE;
4407 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4408 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4409 Then we only have to consider the simpler non-inverted cases. */
4410 if (invert)
4411 t = and_var_with_comparison_1 (stmt,
4412 invert_tree_comparison (code2, false),
4413 op2a, op2b);
4414 else
4415 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4416 return canonicalize_bool (t, invert);
4419 /* Try to simplify the OR of the ssa variable defined by the assignment
4420 STMT with the comparison specified by (OP2A CODE2 OP2B).
4421 Return NULL_TREE if we can't simplify this to a single expression. */
4423 static tree
4424 or_var_with_comparison_1 (gimple stmt,
4425 enum tree_code code2, tree op2a, tree op2b)
4427 tree var = gimple_assign_lhs (stmt);
4428 tree true_test_var = NULL_TREE;
4429 tree false_test_var = NULL_TREE;
4430 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4432 /* Check for identities like (var OR (var != 0)) => true . */
4433 if (TREE_CODE (op2a) == SSA_NAME
4434 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4436 if ((code2 == NE_EXPR && integer_zerop (op2b))
4437 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4439 true_test_var = op2a;
4440 if (var == true_test_var)
4441 return var;
4443 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4444 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4446 false_test_var = op2a;
4447 if (var == false_test_var)
4448 return boolean_true_node;
4452 /* If the definition is a comparison, recurse on it. */
4453 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4455 tree t = or_comparisons_1 (innercode,
4456 gimple_assign_rhs1 (stmt),
4457 gimple_assign_rhs2 (stmt),
4458 code2,
4459 op2a,
4460 op2b);
4461 if (t)
4462 return t;
4465 /* If the definition is an AND or OR expression, we may be able to
4466 simplify by reassociating. */
4467 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4468 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4470 tree inner1 = gimple_assign_rhs1 (stmt);
4471 tree inner2 = gimple_assign_rhs2 (stmt);
4472 gimple s;
4473 tree t;
4474 tree partial = NULL_TREE;
4475 bool is_or = (innercode == BIT_IOR_EXPR);
4477 /* Check for boolean identities that don't require recursive examination
4478 of inner1/inner2:
4479 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4480 inner1 OR (inner1 AND inner2) => inner1
4481 !inner1 OR (inner1 OR inner2) => true
4482 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4483 Likewise for similar cases involving inner2. */
4484 if (inner1 == true_test_var)
4485 return (is_or ? var : inner1);
4486 else if (inner2 == true_test_var)
4487 return (is_or ? var : inner2);
4488 else if (inner1 == false_test_var)
4489 return (is_or
4490 ? boolean_true_node
4491 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4492 else if (inner2 == false_test_var)
4493 return (is_or
4494 ? boolean_true_node
4495 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4497 /* Next, redistribute/reassociate the OR across the inner tests.
4498 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4499 if (TREE_CODE (inner1) == SSA_NAME
4500 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4501 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4502 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4503 gimple_assign_rhs1 (s),
4504 gimple_assign_rhs2 (s),
4505 code2, op2a, op2b)))
4507 /* Handle the OR case, where we are reassociating:
4508 (inner1 OR inner2) OR (op2a code2 op2b)
4509 => (t OR inner2)
4510 If the partial result t is a constant, we win. Otherwise
4511 continue on to try reassociating with the other inner test. */
4512 if (is_or)
4514 if (integer_onep (t))
4515 return boolean_true_node;
4516 else if (integer_zerop (t))
4517 return inner2;
4520 /* Handle the AND case, where we are redistributing:
4521 (inner1 AND inner2) OR (op2a code2 op2b)
4522 => (t AND (inner2 OR (op2a code op2b))) */
4523 else if (integer_zerop (t))
4524 return boolean_false_node;
4526 /* Save partial result for later. */
4527 partial = t;
4530 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4531 if (TREE_CODE (inner2) == SSA_NAME
4532 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4533 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4534 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4535 gimple_assign_rhs1 (s),
4536 gimple_assign_rhs2 (s),
4537 code2, op2a, op2b)))
4539 /* Handle the OR case, where we are reassociating:
4540 (inner1 OR inner2) OR (op2a code2 op2b)
4541 => (inner1 OR t)
4542 => (t OR partial) */
4543 if (is_or)
4545 if (integer_zerop (t))
4546 return inner1;
4547 else if (integer_onep (t))
4548 return boolean_true_node;
4549 /* If both are the same, we can apply the identity
4550 (x OR x) == x. */
4551 else if (partial && same_bool_result_p (t, partial))
4552 return t;
4555 /* Handle the AND case, where we are redistributing:
4556 (inner1 AND inner2) OR (op2a code2 op2b)
4557 => (t AND (inner1 OR (op2a code2 op2b)))
4558 => (t AND partial) */
4559 else
4561 if (integer_zerop (t))
4562 return boolean_false_node;
4563 else if (partial)
4565 /* We already got a simplification for the other
4566 operand to the redistributed AND expression. The
4567 interesting case is when at least one is true.
4568 Or, if both are the same, we can apply the identity
4569 (x AND x) == x. */
4570 if (integer_onep (partial))
4571 return t;
4572 else if (integer_onep (t))
4573 return partial;
4574 else if (same_bool_result_p (t, partial))
4575 return t;
4580 return NULL_TREE;
4583 /* Try to simplify the OR of two comparisons defined by
4584 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4585 If this can be done without constructing an intermediate value,
4586 return the resulting tree; otherwise NULL_TREE is returned.
4587 This function is deliberately asymmetric as it recurses on SSA_DEFs
4588 in the first comparison but not the second. */
4590 static tree
4591 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4592 enum tree_code code2, tree op2a, tree op2b)
4594 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4596 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4597 if (operand_equal_p (op1a, op2a, 0)
4598 && operand_equal_p (op1b, op2b, 0))
4600 /* Result will be either NULL_TREE, or a combined comparison. */
4601 tree t = combine_comparisons (UNKNOWN_LOCATION,
4602 TRUTH_ORIF_EXPR, code1, code2,
4603 truth_type, op1a, op1b);
4604 if (t)
4605 return t;
4608 /* Likewise the swapped case of the above. */
4609 if (operand_equal_p (op1a, op2b, 0)
4610 && operand_equal_p (op1b, op2a, 0))
4612 /* Result will be either NULL_TREE, or a combined comparison. */
4613 tree t = combine_comparisons (UNKNOWN_LOCATION,
4614 TRUTH_ORIF_EXPR, code1,
4615 swap_tree_comparison (code2),
4616 truth_type, op1a, op1b);
4617 if (t)
4618 return t;
4621 /* If both comparisons are of the same value against constants, we might
4622 be able to merge them. */
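/* E.g. (x == 3) OR (x < 5) folds to (x < 5); (x < 3) OR (x > 3) folds
   to the singleton-gap test (x != 3); and (x <= 5) OR (x >= 3) folds
   to true because the two ranges together cover everything.  */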
4623 if (operand_equal_p (op1a, op2a, 0)
4624 && TREE_CODE (op1b) == INTEGER_CST
4625 && TREE_CODE (op2b) == INTEGER_CST)
4627 int cmp = tree_int_cst_compare (op1b, op2b);
4629 /* If we have (op1a != op1b), we should either be able to
4630 return that or TRUE, depending on whether the constant op1b
4631 also satisfies the other comparison against op2b. */
4632 if (code1 == NE_EXPR)
4634 bool done = true;
4635 bool val;
4636 switch (code2)
4638 case EQ_EXPR: val = (cmp == 0); break;
4639 case NE_EXPR: val = (cmp != 0); break;
4640 case LT_EXPR: val = (cmp < 0); break;
4641 case GT_EXPR: val = (cmp > 0); break;
4642 case LE_EXPR: val = (cmp <= 0); break;
4643 case GE_EXPR: val = (cmp >= 0); break;
4644 default: done = false;
4646 if (done)
4648 if (val)
4649 return boolean_true_node;
4650 else
4651 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4654 /* Likewise if the second comparison is a != comparison. */
4655 else if (code2 == NE_EXPR)
4657 bool done = true;
4658 bool val;
4659 switch (code1)
4661 case EQ_EXPR: val = (cmp == 0); break;
4662 case NE_EXPR: val = (cmp != 0); break;
4663 case LT_EXPR: val = (cmp > 0); break;
4664 case GT_EXPR: val = (cmp < 0); break;
4665 case LE_EXPR: val = (cmp >= 0); break;
4666 case GE_EXPR: val = (cmp <= 0); break;
4667 default: done = false;
4669 if (done)
4671 if (val)
4672 return boolean_true_node;
4673 else
4674 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4678 /* See if an equality test is redundant with the other comparison. */
4679 else if (code1 == EQ_EXPR)
4681 bool val;
4682 switch (code2)
4684 case EQ_EXPR: val = (cmp == 0); break;
4685 case NE_EXPR: val = (cmp != 0); break;
4686 case LT_EXPR: val = (cmp < 0); break;
4687 case GT_EXPR: val = (cmp > 0); break;
4688 case LE_EXPR: val = (cmp <= 0); break;
4689 case GE_EXPR: val = (cmp >= 0); break;
4690 default:
4691 val = false;
4693 if (val)
4694 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4696 else if (code2 == EQ_EXPR)
4698 bool val;
4699 switch (code1)
4701 case EQ_EXPR: val = (cmp == 0); break;
4702 case NE_EXPR: val = (cmp != 0); break;
4703 case LT_EXPR: val = (cmp > 0); break;
4704 case GT_EXPR: val = (cmp < 0); break;
4705 case LE_EXPR: val = (cmp >= 0); break;
4706 case GE_EXPR: val = (cmp <= 0); break;
4707 default:
4708 val = false;
4710 if (val)
4711 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4714 /* Choose the less restrictive of two < or <= comparisons. */
4715 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4716 && (code2 == LT_EXPR || code2 == LE_EXPR))
4718 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4719 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4720 else
4721 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4724 /* Likewise choose the less restrictive of two > or >= comparisons. */
4725 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4726 && (code2 == GT_EXPR || code2 == GE_EXPR))
4728 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4729 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4730 else
4731 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4734 /* Check for singleton ranges. */
4735 else if (cmp == 0
4736 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4737 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4738 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4740 /* Check for less/greater pairs that don't restrict the range at all. */
4741 else if (cmp >= 0
4742 && (code1 == LT_EXPR || code1 == LE_EXPR)
4743 && (code2 == GT_EXPR || code2 == GE_EXPR))
4744 return boolean_true_node;
4745 else if (cmp <= 0
4746 && (code1 == GT_EXPR || code1 == GE_EXPR)
4747 && (code2 == LT_EXPR || code2 == LE_EXPR))
4748 return boolean_true_node;
4751 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4752 NAME's definition is a truth value. See if there are any simplifications
4753 that can be done against the NAME's definition. */
4754 if (TREE_CODE (op1a) == SSA_NAME
4755 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4756 && (integer_zerop (op1b) || integer_onep (op1b)))
4758 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4759 || (code1 == NE_EXPR && integer_onep (op1b)));
4760 gimple stmt = SSA_NAME_DEF_STMT (op1a);
4761 switch (gimple_code (stmt))
4763 case GIMPLE_ASSIGN:
4764 /* Try to simplify by copy-propagating the definition. */
4765 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4767 case GIMPLE_PHI:
4768 /* If every argument to the PHI produces the same result when
4769 ORed with the second comparison, we win.
4770 Do not do this unless the type is bool since we need a bool
4771 result here anyway. */
4772 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4774 tree result = NULL_TREE;
4775 unsigned i;
4776 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4778 tree arg = gimple_phi_arg_def (stmt, i);
4780 /* If this PHI has itself as an argument, ignore it.
4781 If all the other args produce the same result,
4782 we're still OK. */
4783 if (arg == gimple_phi_result (stmt))
4784 continue;
4785 else if (TREE_CODE (arg) == INTEGER_CST)
4787 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4789 if (!result)
4790 result = boolean_true_node;
4791 else if (!integer_onep (result))
4792 return NULL_TREE;
4794 else if (!result)
4795 result = fold_build2 (code2, boolean_type_node,
4796 op2a, op2b);
4797 else if (!same_bool_comparison_p (result,
4798 code2, op2a, op2b))
4799 return NULL_TREE;
4801 else if (TREE_CODE (arg) == SSA_NAME
4802 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4804 tree temp;
4805 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
4806 /* In simple cases we can look through PHI nodes,
4807 but we have to be careful with loops.
4808 See PR49073. */
4809 if (! dom_info_available_p (CDI_DOMINATORS)
4810 || gimple_bb (def_stmt) == gimple_bb (stmt)
4811 || dominated_by_p (CDI_DOMINATORS,
4812 gimple_bb (def_stmt),
4813 gimple_bb (stmt)))
4814 return NULL_TREE;
4815 temp = or_var_with_comparison (arg, invert, code2,
4816 op2a, op2b);
4817 if (!temp)
4818 return NULL_TREE;
4819 else if (!result)
4820 result = temp;
4821 else if (!same_bool_result_p (result, temp))
4822 return NULL_TREE;
4824 else
4825 return NULL_TREE;
4827 return result;
4830 default:
4831 break;
4834 return NULL_TREE;
4837 /* Try to simplify the OR of two comparisons, specified by
4838 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4839 If this can be simplified to a single expression (without requiring
4840 introducing more SSA variables to hold intermediate values),
4841 return the resulting tree. Otherwise return NULL_TREE.
4842 If the result expression is non-null, it has boolean type. */
4844 tree
4845 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4846 enum tree_code code2, tree op2a, tree op2b)
4848 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4849 if (t)
4850 return t;
4851 else
4852 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4856 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4858 Either NULL_TREE, a simplified but non-constant expression, or a
4859 constant is returned.
4861 ??? This should go into a gimple-fold-inline.h file to be eventually
4862 privatized with the single valueize function used in the various TUs
4863 to avoid the indirect function call overhead. */
4865 tree
4866 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
4867 tree (*gvalueize) (tree))
4869 code_helper rcode;
4870 tree ops[3] = {};
4871 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4872 edges if there are intermediate VARYING defs. For this reason
4873 do not follow SSA edges here even though SCCVN can technically
4874 just deal fine with that. */
4875 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize)
4876 && rcode.is_tree_code ()
4877 && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
4878 || ((tree_code) rcode) == ADDR_EXPR)
4879 && is_gimple_val (ops[0]))
4881 tree res = ops[0];
4882 if (dump_file && dump_flags & TDF_DETAILS)
4884 fprintf (dump_file, "Match-and-simplified ");
4885 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
4886 fprintf (dump_file, " to ");
4887 print_generic_expr (dump_file, res, 0);
4888 fprintf (dump_file, "\n");
4890 return res;
4893 location_t loc = gimple_location (stmt);
4894 switch (gimple_code (stmt))
4896 case GIMPLE_ASSIGN:
4898 enum tree_code subcode = gimple_assign_rhs_code (stmt);
4900 switch (get_gimple_rhs_class (subcode))
4902 case GIMPLE_SINGLE_RHS:
4904 tree rhs = gimple_assign_rhs1 (stmt);
4905 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
4907 if (TREE_CODE (rhs) == SSA_NAME)
4909 /* If the RHS is an SSA_NAME, return its known constant value,
4910 if any. */
4911 return (*valueize) (rhs);
4913 /* Handle propagating invariant addresses into address
4914 operations. */
4915 else if (TREE_CODE (rhs) == ADDR_EXPR
4916 && !is_gimple_min_invariant (rhs))
4918 HOST_WIDE_INT offset = 0;
4919 tree base;
4920 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
4921 &offset,
4922 valueize);
4923 if (base
4924 && (CONSTANT_CLASS_P (base)
4925 || decl_address_invariant_p (base)))
4926 return build_invariant_address (TREE_TYPE (rhs),
4927 base, offset);
4929 else if (TREE_CODE (rhs) == CONSTRUCTOR
4930 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
4931 && (CONSTRUCTOR_NELTS (rhs)
4932 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
4934 unsigned i;
4935 tree val, *vec;
4937 vec = XALLOCAVEC (tree,
4938 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
4939 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
4941 val = (*valueize) (val);
4942 if (TREE_CODE (val) == INTEGER_CST
4943 || TREE_CODE (val) == REAL_CST
4944 || TREE_CODE (val) == FIXED_CST)
4945 vec[i] = val;
4946 else
4947 return NULL_TREE;
4950 return build_vector (TREE_TYPE (rhs), vec);
4952 if (subcode == OBJ_TYPE_REF)
4954 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
4955 /* If callee is constant, we can fold away the wrapper. */
4956 if (is_gimple_min_invariant (val))
4957 return val;
4960 if (kind == tcc_reference)
4962 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
4963 || TREE_CODE (rhs) == REALPART_EXPR
4964 || TREE_CODE (rhs) == IMAGPART_EXPR)
4965 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4967 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4968 return fold_unary_loc (EXPR_LOCATION (rhs),
4969 TREE_CODE (rhs),
4970 TREE_TYPE (rhs), val);
4972 else if (TREE_CODE (rhs) == BIT_FIELD_REF
4973 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4975 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4976 return fold_ternary_loc (EXPR_LOCATION (rhs),
4977 TREE_CODE (rhs),
4978 TREE_TYPE (rhs), val,
4979 TREE_OPERAND (rhs, 1),
4980 TREE_OPERAND (rhs, 2));
4982 else if (TREE_CODE (rhs) == MEM_REF
4983 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4985 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4986 if (TREE_CODE (val) == ADDR_EXPR
4987 && is_gimple_min_invariant (val))
4989 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
4990 unshare_expr (val),
4991 TREE_OPERAND (rhs, 1));
4992 if (tem)
4993 rhs = tem;
4996 return fold_const_aggregate_ref_1 (rhs, valueize);
4998 else if (kind == tcc_declaration)
4999 return get_symbol_constant_value (rhs);
5000 return rhs;
5003 case GIMPLE_UNARY_RHS:
5004 return NULL_TREE;
5006 case GIMPLE_BINARY_RHS:
5008 /* Handle binary operators that can appear in GIMPLE form. */
5009 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5010 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5012 /* Translate &x + CST into an invariant form suitable for
5013 further propagation. */
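/* E.g. &a p+ 4 becomes the equivalent of &MEM[&a + 4], an address
   that satisfies is_gimple_min_invariant.  */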
5014 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
5015 && TREE_CODE (op0) == ADDR_EXPR
5016 && TREE_CODE (op1) == INTEGER_CST)
5018 tree off = fold_convert (ptr_type_node, op1);
5019 return build_fold_addr_expr_loc
5020 (loc,
5021 fold_build2 (MEM_REF,
5022 TREE_TYPE (TREE_TYPE (op0)),
5023 unshare_expr (op0), off));
5026 return NULL_TREE;
5029 case GIMPLE_TERNARY_RHS:
5030 return NULL_TREE;
5032 default:
5033 gcc_unreachable ();
5037 case GIMPLE_CALL:
5039 tree fn;
5040 gcall *call_stmt = as_a <gcall *> (stmt);
5042 if (gimple_call_internal_p (stmt))
5044 enum tree_code subcode = ERROR_MARK;
5045 switch (gimple_call_internal_fn (stmt))
5047 case IFN_UBSAN_CHECK_ADD:
5048 subcode = PLUS_EXPR;
5049 break;
5050 case IFN_UBSAN_CHECK_SUB:
5051 subcode = MINUS_EXPR;
5052 break;
5053 case IFN_UBSAN_CHECK_MUL:
5054 subcode = MULT_EXPR;
5055 break;
5056 default:
5057 return NULL_TREE;
5059 tree arg0 = gimple_call_arg (stmt, 0);
5060 tree arg1 = gimple_call_arg (stmt, 1);
5061 tree op0 = (*valueize) (arg0);
5062 tree op1 = (*valueize) (arg1);
5064 if (TREE_CODE (op0) != INTEGER_CST
5065 || TREE_CODE (op1) != INTEGER_CST)
5067 switch (subcode)
5069 case MULT_EXPR:
5070 /* x * 0 = 0 * x = 0 without overflow. */
5071 if (integer_zerop (op0) || integer_zerop (op1))
5072 return build_zero_cst (TREE_TYPE (arg0));
5073 break;
5074 case MINUS_EXPR:
5075 /* y - y = 0 without overflow. */
5076 if (operand_equal_p (op0, op1, 0))
5077 return build_zero_cst (TREE_TYPE (arg0));
5078 break;
5079 default:
5080 break;
5083 tree res
5084 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5085 if (res
5086 && TREE_CODE (res) == INTEGER_CST
5087 && !TREE_OVERFLOW (res))
5088 return res;
5089 return NULL_TREE;
5092 fn = (*valueize) (gimple_call_fn (stmt));
5093 if (TREE_CODE (fn) == ADDR_EXPR
5094 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5095 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5096 && gimple_builtin_call_types_compatible_p (stmt,
5097 TREE_OPERAND (fn, 0)))
5099 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5100 tree retval;
5101 unsigned i;
5102 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5103 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5104 retval = fold_builtin_call_array (loc,
5105 gimple_call_return_type (call_stmt),
5106 fn, gimple_call_num_args (stmt), args);
5107 if (retval)
5109 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5110 STRIP_NOPS (retval);
5111 retval = fold_convert (gimple_call_return_type (call_stmt),
5112 retval);
5114 return retval;
5116 return NULL_TREE;
5119 default:
5120 return NULL_TREE;
5124 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5125 Returns NULL_TREE if folding to a constant is not possible, otherwise
5126 returns a constant according to is_gimple_min_invariant. */
5128 tree
5129 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
5131 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5132 if (res && is_gimple_min_invariant (res))
5133 return res;
5134 return NULL_TREE;
5138 /* The following set of functions are supposed to fold references using
5139 their constant initializers. */
5141 /* See if we can find a constructor defining the value of BASE.
5142 When we know the constructor at a constant offset (such as when
5143 BASE is array[40] and we do know the constructor of array), then
5144 BIT_OFFSET is adjusted accordingly.
5146 As a special case, return error_mark_node when constructor
5147 is not explicitly available, but it is known to be zero
5148 such as 'static const int a;'. */
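/* E.g. for MEM[&arr + 8] the constructor of arr is returned and
   *BIT_OFFSET is increased by 8 * BITS_PER_UNIT.  */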
5149 static tree
5150 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5151 tree (*valueize)(tree))
5153 HOST_WIDE_INT bit_offset2, size, max_size;
5154 if (TREE_CODE (base) == MEM_REF)
5156 if (!integer_zerop (TREE_OPERAND (base, 1)))
5158 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
5159 return NULL_TREE;
5160 *bit_offset += (mem_ref_offset (base).to_short_addr ()
5161 * BITS_PER_UNIT);
5164 if (valueize
5165 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5166 base = valueize (TREE_OPERAND (base, 0));
5167 if (!base || TREE_CODE (base) != ADDR_EXPR)
5168 return NULL_TREE;
5169 base = TREE_OPERAND (base, 0);
5172 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5173 DECL_INITIAL. If BASE is a nested reference into another
5174 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5175 the inner reference. */
5176 switch (TREE_CODE (base))
5178 case VAR_DECL:
5179 case CONST_DECL:
5181 tree init = ctor_for_folding (base);
5183 /* Our semantics are the exact opposite of ctor_for_folding's:
5184 NULL means unknown, while error_mark_node means 0. */
5185 if (init == error_mark_node)
5186 return NULL_TREE;
5187 if (!init)
5188 return error_mark_node;
5189 return init;
5192 case ARRAY_REF:
5193 case COMPONENT_REF:
5194 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
5195 if (max_size == -1 || size != max_size)
5196 return NULL_TREE;
5197 *bit_offset += bit_offset2;
5198 return get_base_constructor (base, bit_offset, valueize);
5200 case STRING_CST:
5201 case CONSTRUCTOR:
5202 return base;
5204 default:
5205 return NULL_TREE;
5209 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5210 SIZE to the memory at bit OFFSET. */
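/* For example, assuming 32-bit int, reading an int at bit OFFSET 64
   from the constructor of "static const int a[4] = {1, 2, 3, 4};"
   computes access_index 2 and yields 3.  */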
5212 static tree
5213 fold_array_ctor_reference (tree type, tree ctor,
5214 unsigned HOST_WIDE_INT offset,
5215 unsigned HOST_WIDE_INT size,
5216 tree from_decl)
5218 unsigned HOST_WIDE_INT cnt;
5219 tree cfield, cval;
5220 offset_int low_bound;
5221 offset_int elt_size;
5222 offset_int index, max_index;
5223 offset_int access_index;
5224 tree domain_type = NULL_TREE, index_type = NULL_TREE;
5225 HOST_WIDE_INT inner_offset;
5227 /* Compute low bound and elt size. */
5228 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
5229 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
5230 if (domain_type && TYPE_MIN_VALUE (domain_type))
5232 /* Static constructors for variably sized objects make no sense. */
5233 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
5234 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
5235 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
5237 else
5238 low_bound = 0;
5239 /* Static constructors for variably sized objects make no sense. */
5240 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
5241 == INTEGER_CST);
5242 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
5244 /* We can handle only constant-sized accesses that are known not to
5245 be larger than the size of the array element. */
5246 if (!TYPE_SIZE_UNIT (type)
5247 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
5248 || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
5249 || elt_size == 0)
5250 return NULL_TREE;
5252 /* Compute the array index we look for. */
5253 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
5254 elt_size);
5255 access_index += low_bound;
5256 if (index_type)
5257 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
5258 TYPE_SIGN (index_type));
5260 /* And offset within the access. */
5261 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
5263 /* See if the array field is large enough to span the whole access. We do
5264 not care to fold accesses spanning multiple array indexes. */
5265 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
5266 return NULL_TREE;
5268 index = low_bound - 1;
5269 if (index_type)
5270 index = wi::ext (index, TYPE_PRECISION (index_type),
5271 TYPE_SIGN (index_type));
5273 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
5275 /* An array constructor might explicitly set the index, specify a range,
5276 or leave the index NULL, meaning that it is the next index after the
5277 previous one. */
5278 if (cfield)
5280 if (TREE_CODE (cfield) == INTEGER_CST)
5281 max_index = index = wi::to_offset (cfield);
5282 else
5284 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
5285 index = wi::to_offset (TREE_OPERAND (cfield, 0));
5286 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
5289 else
5291 index += 1;
5292 if (index_type)
5293 index = wi::ext (index, TYPE_PRECISION (index_type),
5294 TYPE_SIGN (index_type));
5295 max_index = index;
5298 /* Do we have a match? */
5299 if (wi::cmpu (access_index, index) >= 0
5300 && wi::cmpu (access_index, max_index) <= 0)
5301 return fold_ctor_reference (type, cval, inner_offset, size,
5302 from_decl);
5304 /* When memory is not explicitly mentioned in the constructor,
5305 it is 0 (or out of range). */
5306 return build_zero_cst (type);
5309 /* CTOR is CONSTRUCTOR of an aggregate or vector.
5310 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
5312 static tree
5313 fold_nonarray_ctor_reference (tree type, tree ctor,
5314 unsigned HOST_WIDE_INT offset,
5315 unsigned HOST_WIDE_INT size,
5316 tree from_decl)
5318 unsigned HOST_WIDE_INT cnt;
5319 tree cfield, cval;
5321 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5322 cval)
5324 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5325 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5326 tree field_size = DECL_SIZE (cfield);
5327 offset_int bitoffset;
5328 offset_int bitoffset_end, access_end;
5330 /* Variable sized objects in static constructors make no sense,
5331 but field_size can be NULL for flexible array members. */
5332 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5333 && TREE_CODE (byte_offset) == INTEGER_CST
5334 && (field_size != NULL_TREE
5335 ? TREE_CODE (field_size) == INTEGER_CST
5336 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
5338 /* Compute bit offset of the field. */
5339 bitoffset = (wi::to_offset (field_offset)
5340 + wi::lshift (wi::to_offset (byte_offset),
5341 LOG2_BITS_PER_UNIT));
5342 /* Compute bit offset where the field ends. */
5343 if (field_size != NULL_TREE)
5344 bitoffset_end = bitoffset + wi::to_offset (field_size);
5345 else
5346 bitoffset_end = 0;
5348 access_end = offset_int (offset) + size;
5350 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5351 [BITOFFSET, BITOFFSET_END)? */
5352 if (wi::cmps (access_end, bitoffset) > 0
5353 && (field_size == NULL_TREE
5354 || wi::lts_p (offset, bitoffset_end)))
5356 offset_int inner_offset = offset_int (offset) - bitoffset;
5357 /* We do have overlap. Now see if field is large enough to
5358 cover the access. Give up for accesses spanning multiple
5359 fields. */
5360 if (wi::cmps (access_end, bitoffset_end) > 0)
5361 return NULL_TREE;
5362 if (wi::lts_p (offset, bitoffset))
5363 return NULL_TREE;
5364 return fold_ctor_reference (type, cval,
5365 inner_offset.to_uhwi (), size,
5366 from_decl);
5369 /* When memory is not explicitly mentioned in the constructor, it is 0. */
5370 return build_zero_cst (type);
5373 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5374 to the memory at bit OFFSET. */
5376 tree
5377 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
5378 unsigned HOST_WIDE_INT size, tree from_decl)
5380 tree ret;
5382 /* We found the field with an exact match. */
5383 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5384 && !offset)
5385 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5387 /* We are at the end of the walk; see if we can view-convert the
5388 result. */
5389 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5390 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
5391 && !compare_tree_int (TYPE_SIZE (type), size)
5392 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
5394 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5395 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5396 if (ret)
5397 STRIP_NOPS (ret);
5398 return ret;
5400 /* For constants and byte-aligned/sized reads try to go through
5401 native_encode/interpret. */
5402 if (CONSTANT_CLASS_P (ctor)
5403 && BITS_PER_UNIT == 8
5404 && offset % BITS_PER_UNIT == 0
5405 && size % BITS_PER_UNIT == 0
5406 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5408 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
5409 if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5410 offset / BITS_PER_UNIT) > 0)
5411 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
5413 if (TREE_CODE (ctor) == CONSTRUCTOR)
5416 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5417 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
5418 return fold_array_ctor_reference (type, ctor, offset, size,
5419 from_decl);
5420 else
5421 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5422 from_decl);
5425 return NULL_TREE;
5428 /* Return the tree representing the element referenced by T if T is an
5429 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
5430 names using VALUEIZE. Return NULL_TREE otherwise. */
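/* For example, given "static const int a[2] = {1, 2};", the reference
   a[1] folds to 2 by finding the constructor of "a" and folding the
   read at the constant offset.  */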
5432 tree
5433 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5435 tree ctor, idx, base;
5436 HOST_WIDE_INT offset, size, max_size;
5437 tree tem;
5439 if (TREE_THIS_VOLATILE (t))
5440 return NULL_TREE;
5442 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
5443 return get_symbol_constant_value (t);
5445 tem = fold_read_from_constant_string (t);
5446 if (tem)
5447 return tem;
5449 switch (TREE_CODE (t))
5451 case ARRAY_REF:
5452 case ARRAY_RANGE_REF:
5453 /* Constant indexes are handled well by get_base_constructor.
5454 Only special case variable offsets.
5455 FIXME: This code can't handle nested references with variable indexes
5456 (they will be handled only by iteration of ccp). Perhaps we can bring
5457 get_ref_base_and_extent here and make it use a valueize callback. */
5458 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5459 && valueize
5460 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
5461 && TREE_CODE (idx) == INTEGER_CST)
5463 tree low_bound, unit_size;
5465 /* If the resulting bit-offset is constant, track it. */
5466 if ((low_bound = array_ref_low_bound (t),
5467 TREE_CODE (low_bound) == INTEGER_CST)
5468 && (unit_size = array_ref_element_size (t),
5469 tree_fits_uhwi_p (unit_size)))
5471 offset_int woffset
5472 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5473 TYPE_PRECISION (TREE_TYPE (idx)));
5475 if (wi::fits_shwi_p (woffset))
5477 offset = woffset.to_shwi ();
5478 /* TODO: This code seems wrong; it should multiply first and then
5479 check whether the result fits. */
5480 offset *= tree_to_uhwi (unit_size);
5481 offset *= BITS_PER_UNIT;
5483 base = TREE_OPERAND (t, 0);
5484 ctor = get_base_constructor (base, &offset, valueize);
5485 /* Empty constructor. Always fold to 0. */
5486 if (ctor == error_mark_node)
5487 return build_zero_cst (TREE_TYPE (t));
5488 /* Out-of-bounds array access. The value is undefined,
5489 but don't fold. */
5490 if (offset < 0)
5491 return NULL_TREE;
5492 /* We cannot determine the ctor. */
5493 if (!ctor)
5494 return NULL_TREE;
5495 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5496 tree_to_uhwi (unit_size)
5497 * BITS_PER_UNIT,
5498 base);
5502 /* Fallthru. */
5504 case COMPONENT_REF:
5505 case BIT_FIELD_REF:
5506 case TARGET_MEM_REF:
5507 case MEM_REF:
5508 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
5509 ctor = get_base_constructor (base, &offset, valueize);
5511 /* Empty constructor. Always fold to 0. */
5512 if (ctor == error_mark_node)
5513 return build_zero_cst (TREE_TYPE (t));
5514 /* We do not know the precise address. */
5515 if (max_size == -1 || max_size != size)
5516 return NULL_TREE;
5517 /* We cannot determine the ctor. */
5518 if (!ctor)
5519 return NULL_TREE;
5521 /* Out-of-bounds array access. The value is undefined, but don't fold. */
5522 if (offset < 0)
5523 return NULL_TREE;
5525 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5526 base);
5528 case REALPART_EXPR:
5529 case IMAGPART_EXPR:
5531 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5532 if (c && TREE_CODE (c) == COMPLEX_CST)
5533 return fold_build1_loc (EXPR_LOCATION (t),
5534 TREE_CODE (t), TREE_TYPE (t), c);
5535 break;
5538 default:
5539 break;
5542 return NULL_TREE;
5545 tree
5546 fold_const_aggregate_ref (tree t)
5548 return fold_const_aggregate_ref_1 (t, NULL);
5551 /* Lookup virtual method with index TOKEN in a virtual table V
5552 at OFFSET.
5553 If CAN_REFER is non-NULL, set it to false if the method
5554 is not referable or if the virtual table is ill-formed (such as rewritten
5555 by a non-C++-produced symbol); if CAN_REFER is NULL, just return NULL in that case. */
5557 tree
5558 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5559 tree v,
5560 unsigned HOST_WIDE_INT offset,
5561 bool *can_refer)
5563 tree vtable = v, init, fn;
5564 unsigned HOST_WIDE_INT size;
5565 unsigned HOST_WIDE_INT elt_size, access_index;
5566 tree domain_type;
5568 if (can_refer)
5569 *can_refer = true;
5571 /* First of all, double-check that we have a virtual table. */
5572 if (TREE_CODE (v) != VAR_DECL
5573 || !DECL_VIRTUAL_P (v))
5575 /* Pass down that we lost track of the target. */
5576 if (can_refer)
5577 *can_refer = false;
5578 return NULL_TREE;
5581 init = ctor_for_folding (v);
5583 /* The virtual tables should always be born with constructors
5584 and we should always assume that they are available for
5585 folding. At the moment we do not stream them in all cases,
5586 but it should never happen that the ctor seems unreachable. */
5587 gcc_assert (init);
5588 if (init == error_mark_node)
5590 gcc_assert (in_lto_p);
5591 /* Pass down that we lost track of the target. */
5592 if (can_refer)
5593 *can_refer = false;
5594 return NULL_TREE;
5596 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
5597 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
5598 offset *= BITS_PER_UNIT;
5599 offset += token * size;
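/* E.g. with 8-byte vtable slots, OFFSET 16 and TOKEN 3 make the
   computation below select constructor entry 16/8 + 3 == 5.  */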
5601 /* Look up the value in the constructor, which is assumed to be an array.
5602 This is equivalent to
5603 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5604 offset, size, NULL);
5605 but in constant time. We expect that the frontend produced a simple
5606 array without indexed initializers. */
5608 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5609 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5610 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5611 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5613 access_index = offset / BITS_PER_UNIT / elt_size;
5614 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5616 /* This code assumes that there are no
5617 indexed fields produced by the C++ FE, so we can directly index the array. */
5618 if (access_index < CONSTRUCTOR_NELTS (init))
5620 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5621 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5622 STRIP_NOPS (fn);
5624 else
5625 fn = NULL;
5627 /* For a type-inconsistent program we may end up looking up a virtual method
5628 in a virtual table that does not contain TOKEN entries. We may overrun
5629 the virtual table and pick up a constant or RTTI info pointer.
5630 In any case the call is undefined. */
5631 if (!fn
5632 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5633 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5634 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5635 else
5637 fn = TREE_OPERAND (fn, 0);
5639 /* When cgraph node is missing and function is not public, we cannot
5640 devirtualize. This can happen in WHOPR when the actual method
5641 ends up in other partition, because we found devirtualization
5642 possibility too late. */
5643 if (!can_refer_decl_in_current_unit_p (fn, vtable))
5645 if (can_refer)
5647 *can_refer = false;
5648 return fn;
5650 return NULL_TREE;
5654 /* Make sure we create a cgraph node for functions we'll reference.
5655 They can be non-existent if the reference comes from an entry
5656 of an external vtable for example. */
5657 cgraph_node::get_create (fn);
5659 return fn;

/* Return a declaration of a function which an OBJ_TYPE_REF references.
   TOKEN is the integer form of OBJ_TYPE_REF_TOKEN of the reference
   expression.  KNOWN_BINFO carries the binfo describing the true type
   of OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER, if non-NULL, to false if the method is not referable
   or if the virtual table is ill-formed (such as rewritten by a
   non-C++-produced symbol); in that case just return NULL.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
                                  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual method table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
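
/* Illustrative usage sketch (not code from this file): a
   devirtualization client that has determined the dynamic type of the
   object behind a polymorphic call could resolve the callee with

     bool can_refer;
     tree fndecl
       = gimple_get_virt_method_for_binfo (token, known_binfo, &can_refer);
     if (fndecl && can_refer)
       gimple_call_set_fndecl (call_stmt, fndecl);

   where `token', `known_binfo' and `call_stmt' are assumed to come
   from the caller's analysis of the OBJ_TYPE_REF expression.  */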

/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
         cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
        {
        case ABS_EXPR:
          /* Always true for floating-point operands.  */
          return true;

        case MULT_EXPR:
          /* True if the two operands are identical (since we are
             restricted to floating-point inputs).  */
          op0 = gimple_assign_rhs1 (def_stmt);
          op1 = gimple_assign_rhs2 (def_stmt);

          if (op0 == op1
              || operand_equal_p (op0, op1, 0))
            return true;

          /* Fall through to return false.  */
        default:
          return false;
        }
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
          && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
        {
          tree arg1;

          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_ACOS):
            CASE_FLT_FN (BUILT_IN_ACOSH):
            CASE_FLT_FN (BUILT_IN_CABS):
            CASE_FLT_FN (BUILT_IN_COSH):
            CASE_FLT_FN (BUILT_IN_ERFC):
            CASE_FLT_FN (BUILT_IN_EXP):
            CASE_FLT_FN (BUILT_IN_EXP10):
            CASE_FLT_FN (BUILT_IN_EXP2):
            CASE_FLT_FN (BUILT_IN_FABS):
            CASE_FLT_FN (BUILT_IN_FDIM):
            CASE_FLT_FN (BUILT_IN_HYPOT):
            CASE_FLT_FN (BUILT_IN_POW10):
              return true;

            CASE_FLT_FN (BUILT_IN_SQRT):
              /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
                 negative inputs.  */
              if (!HONOR_SIGNED_ZEROS (val))
                return true;

              break;

            CASE_FLT_FN (BUILT_IN_POWI):
              /* True if the second argument is an even integer.  */
              arg1 = gimple_call_arg (def_stmt, 1);

              if (TREE_CODE (arg1) == INTEGER_CST
                  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
                return true;

              break;

            CASE_FLT_FN (BUILT_IN_POW):
              /* True if the second argument is an even integer-valued
                 real.  */
              arg1 = gimple_call_arg (def_stmt, 1);

              if (TREE_CODE (arg1) == REAL_CST)
                {
                  REAL_VALUE_TYPE c;
                  HOST_WIDE_INT n;

                  c = TREE_REAL_CST (arg1);
                  n = real_to_integer (&c);

                  if ((n & 1) == 0)
                    {
                      REAL_VALUE_TYPE cint;
                      real_from_integer (&cint, VOIDmode, n, SIGNED);
                      if (real_identical (&c, &cint))
                        return true;
                    }
                }

              break;

            default:
              return false;
            }
        }
    }

  return false;
}
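
/* Illustrative usage sketch (not code from this file): the pow
   lowering code in the cse_sincos pass mentioned above relies on this
   predicate, since expanding pow (base, 0.5) into sqrt (base) is only
   valid when `base' cannot be negative (pow (-inf, 0.5) is +inf while
   sqrt (-inf) is NaN).  A caller would check roughly

     if (gimple_val_nonnegative_real_p (base))
       expand_pow_as_sqrt (...);

   where `base' is the first argument of the pow call and
   `expand_pow_as_sqrt' is a hypothetical stand-in for the caller's
   expansion logic.  */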

/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
          && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (TREE_CODE (min_val) == INTEGER_CST)
            return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
          && tree_fits_uhwi_p (off))
        {
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);
          if (offset / part_widthi
              < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
                                part_width, index);
        }

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
          || DECL_P (TREE_OPERAND (addr, 0)))
        return fold_build2 (MEM_REF, type,
                            addr,
                            wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
        sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
        return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
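
/* Illustrative usage sketch (not code from this file): a caller
   simplifying a dereference `*p_1' first valueizes `p_1'; if that
   yields `&x' then

     tree res = gimple_fold_indirect_ref (ptr);

   returns `x' via the *&p => p rule above, and NULL_TREE when no
   pattern matches, in which case the caller keeps the memory
   reference unchanged.  Here `ptr' stands for the valueized pointer
   value.  */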

/* Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}

/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
               "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each operand to the unsigned type, queueing any statements
     required to gimplify the conversions.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
                     force_gimple_operand (fold_convert (type,
                                                         gimple_op (stmt, i)),
                                           &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original type.  */
  gimple cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
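
/* Illustrative usage sketch (not code from this file), exercising both
   helpers above: a pass that must move signed arithmetic to a place
   where its guarding condition no longer holds can rewrite it to the
   equivalent unsigned form first:

     gimple stmt = gsi_stmt (gsi);
     if (is_gimple_assign (stmt)
         && arith_code_with_undefined_signed_overflow
              (gimple_assign_rhs_code (stmt)))
       {
         gimple_seq stmts = rewrite_to_defined_overflow (stmt);
         gsi_replace_with_seq (&gsi, stmts, true);
       }

   where `gsi' is assumed to be a statement iterator provided by the
   caller.  */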

/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   OP0 is expected to be valueized already.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum tree_code code, tree type, tree op0,
              tree (*valueize)(tree))
{
  tree res = gimple_simplify (code, type, op0, seq, valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
        res = make_ssa_name (type);
      else
        res = create_tmp_reg (type);
      gimple stmt;
      if (code == REALPART_EXPR
          || code == IMAGPART_EXPR
          || code == VIEW_CONVERT_EXPR)
        stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
        stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   OP0 and OP1 are expected to be valueized already.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum tree_code code, tree type, tree op0, tree op1,
              tree (*valueize)(tree))
{
  tree res = gimple_simplify (code, type, op0, op1, seq, valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
        res = make_ssa_name (type);
      else
        res = create_tmp_reg (type);
      gimple stmt = gimple_build_assign (res, code, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   OP0, OP1 and OP2 are expected to be valueized already.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum tree_code code, tree type, tree op0, tree op1, tree op2,
              tree (*valueize)(tree))
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
                              seq, valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
        res = make_ssa_name (type);
      else
        res = create_tmp_reg (type);
      gimple stmt;
      if (code == BIT_FIELD_REF)
        stmt = gimple_build_assign (res, code,
                                    build3 (code, type, op0, op1, op2));
      else
        stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
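
/* Illustrative usage sketch (not code from this file): assuming the
   declarations in gimple-fold.h default VALUEIZE to NULL, a client
   can build expressions without emitting statements for anything that
   simplifies to an existing value:

     gimple_seq seq = NULL;
     tree sum = gimple_build (&seq, loc, PLUS_EXPR, type, a, b);
     tree neg = gimple_build (&seq, loc, NEGATE_EXPR, type, sum);

   and then insert `seq' at the desired place, e.g. with
   gsi_insert_seq_before.  `loc', `type', `a' and `b' are assumed to
   be supplied by the caller.  */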

/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   ARG0 is expected to be valueized already.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum built_in_function fn, tree type, tree arg0,
              tree (*valueize)(tree))
{
  tree res = gimple_simplify (fn, type, arg0, seq, valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 1, arg0);
      if (!VOID_TYPE_P (type))
        {
          if (gimple_in_ssa_p (cfun))
            res = make_ssa_name (type);
          else
            res = create_tmp_reg (type);
          gimple_call_set_lhs (stmt, res);
        }
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   ARG0 and ARG1 are expected to be valueized already.  Returns the
   built expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum built_in_function fn, tree type, tree arg0, tree arg1,
              tree (*valueize)(tree))
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq, valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
      if (!VOID_TYPE_P (type))
        {
          if (gimple_in_ssa_p (cfun))
            res = make_ssa_name (type);
          else
            res = create_tmp_reg (type);
          gimple_call_set_lhs (stmt, res);
        }
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   ARG0, ARG1 and ARG2 are expected to be valueized already.  Returns
   the built expression value (or NULL_TREE if TYPE is void) and
   appends statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum built_in_function fn, tree type,
              tree arg0, tree arg1, tree arg2,
              tree (*valueize)(tree))
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2, seq, valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
      if (!VOID_TYPE_P (type))
        {
          if (gimple_in_ssa_p (cfun))
            res = make_ssa_name (type);
          else
            res = create_tmp_reg (type);
          gimple_call_set_lhs (stmt, res);
        }
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (seq, loc, NOP_EXPR, type, op);
}
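
/* Illustrative usage sketch (not code from this file): gimple_convert
   pairs naturally with the gimple_build overloads above, emitting a
   conversion statement only when one is actually needed:

     gimple_seq seq = NULL;
     tree tem = gimple_convert (&seq, loc, sizetype, off);
     tree sum = gimple_build (&seq, loc, PLUS_EXPR, sizetype, base, tem);

   If `off' already has a type compatible with sizetype, no statement
   is generated for the conversion.  `loc', `off' and `base' are
   assumed to be supplied by the caller.  */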