/* gcc/gimple-fold.c  */
/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2015 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "predict.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "rtl.h"
29 #include "ssa.h"
30 #include "alias.h"
31 #include "fold-const.h"
32 #include "flags.h"
33 #include "insn-config.h"
34 #include "expmed.h"
35 #include "dojump.h"
36 #include "explow.h"
37 #include "calls.h"
38 #include "emit-rtl.h"
39 #include "varasm.h"
40 #include "stmt.h"
41 #include "expr.h"
42 #include "stor-layout.h"
43 #include "dumpfile.h"
44 #include "dominance.h"
45 #include "internal-fn.h"
46 #include "gimple-fold.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "tree-into-ssa.h"
50 #include "tree-dfa.h"
51 #include "tree-ssa.h"
52 #include "tree-ssa-propagate.h"
53 #include "target.h"
54 #include "cgraph.h"
55 #include "ipa-utils.h"
56 #include "gimple-pretty-print.h"
57 #include "tree-ssa-address.h"
58 #include "langhooks.h"
59 #include "gimplify-me.h"
60 #include "dbgcnt.h"
61 #include "builtins.h"
62 #include "output.h"
63 #include "tree-eh.h"
64 #include "gimple-match.h"
66 /* Return true when DECL can be referenced from current unit.
67 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
68 We can get declarations that are not possible to reference for various
69 reasons:
71 1) When analyzing C++ virtual tables.
72 C++ virtual tables do have known constructors even
73 when they are keyed to other compilation unit.
74 Those tables can contain pointers to methods and vars
75 in other units. Those methods have both STATIC and EXTERNAL
76 set.
77 2) In WHOPR mode devirtualization might lead to reference
78 to method that was partitioned elsehwere.
79 In this case we have static VAR_DECL or FUNCTION_DECL
80 that has no corresponding callgraph/varpool node
81 declaring the body.
82 3) COMDAT functions referred by external vtables that
83 we devirtualize only during final compilation stage.
84 At this time we already decided that we will not output
85 the function body and thus we can't reference the symbol
86 directly. */
88 static bool
89 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
91 varpool_node *vnode;
92 struct cgraph_node *node;
93 symtab_node *snode;
95 if (DECL_ABSTRACT_P (decl))
96 return false;
98 /* We are concerned only about static/external vars and functions. */
99 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
100 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
101 return true;
103 /* Static objects can be referred only if they was not optimized out yet. */
104 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
106 /* Before we start optimizing unreachable code we can be sure all
107 static objects are defined. */
108 if (symtab->function_flags_ready)
109 return true;
110 snode = symtab_node::get (decl);
111 if (!snode || !snode->definition)
112 return false;
113 node = dyn_cast <cgraph_node *> (snode);
114 return !node || !node->global.inlined_to;
117 /* We will later output the initializer, so we can refer to it.
118 So we are concerned only when DECL comes from initializer of
119 external var or var that has been optimized out. */
120 if (!from_decl
121 || TREE_CODE (from_decl) != VAR_DECL
122 || (!DECL_EXTERNAL (from_decl)
123 && (vnode = varpool_node::get (from_decl)) != NULL
124 && vnode->definition)
125 || (flag_ltrans
126 && (vnode = varpool_node::get (from_decl)) != NULL
127 && vnode->in_other_partition))
128 return true;
129 /* We are folding reference from external vtable. The vtable may reffer
130 to a symbol keyed to other compilation unit. The other compilation
131 unit may be in separate DSO and the symbol may be hidden. */
132 if (DECL_VISIBILITY_SPECIFIED (decl)
133 && DECL_EXTERNAL (decl)
134 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
135 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
136 return false;
137 /* When function is public, we always can introduce new reference.
138 Exception are the COMDAT functions where introducing a direct
139 reference imply need to include function body in the curren tunit. */
140 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
141 return true;
142 /* We have COMDAT. We are going to check if we still have definition
143 or if the definition is going to be output in other partition.
144 Bypass this when gimplifying; all needed functions will be produced.
146 As observed in PR20991 for already optimized out comdat virtual functions
147 it may be tempting to not necessarily give up because the copy will be
148 output elsewhere when corresponding vtable is output.
149 This is however not possible - ABI specify that COMDATs are output in
150 units where they are used and when the other unit was compiled with LTO
151 it is possible that vtable was kept public while the function itself
152 was privatized. */
153 if (!symtab->function_flags_ready)
154 return true;
156 snode = symtab_node::get (decl);
157 if (!snode
158 || ((!snode->definition || DECL_EXTERNAL (decl))
159 && (!snode->in_other_partition
160 || (!snode->forced_by_abi && !snode->force_output))))
161 return false;
162 node = dyn_cast <cgraph_node *> (snode);
163 return !node || !node->global.inlined_to;
166 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
167 acceptable form for is_gimple_min_invariant.
168 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
170 tree
171 canonicalize_constructor_val (tree cval, tree from_decl)
173 tree orig_cval = cval;
174 STRIP_NOPS (cval);
175 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
176 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
178 tree ptr = TREE_OPERAND (cval, 0);
179 if (is_gimple_min_invariant (ptr))
180 cval = build1_loc (EXPR_LOCATION (cval),
181 ADDR_EXPR, TREE_TYPE (ptr),
182 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
183 ptr,
184 fold_convert (ptr_type_node,
185 TREE_OPERAND (cval, 1))));
187 if (TREE_CODE (cval) == ADDR_EXPR)
189 tree base = NULL_TREE;
190 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
192 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
193 if (base)
194 TREE_OPERAND (cval, 0) = base;
196 else
197 base = get_base_address (TREE_OPERAND (cval, 0));
198 if (!base)
199 return NULL_TREE;
201 if ((TREE_CODE (base) == VAR_DECL
202 || TREE_CODE (base) == FUNCTION_DECL)
203 && !can_refer_decl_in_current_unit_p (base, from_decl))
204 return NULL_TREE;
205 if (TREE_CODE (base) == VAR_DECL)
206 TREE_ADDRESSABLE (base) = 1;
207 else if (TREE_CODE (base) == FUNCTION_DECL)
209 /* Make sure we create a cgraph node for functions we'll reference.
210 They can be non-existent if the reference comes from an entry
211 of an external vtable for example. */
212 cgraph_node::get_create (base);
214 /* Fixup types in global initializers. */
215 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
216 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
218 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
219 cval = fold_convert (TREE_TYPE (orig_cval), cval);
220 return cval;
222 if (TREE_OVERFLOW_P (cval))
223 return drop_tree_overflow (cval);
224 return orig_cval;
227 /* If SYM is a constant variable with known value, return the value.
228 NULL_TREE is returned otherwise. */
230 tree
231 get_symbol_constant_value (tree sym)
233 tree val = ctor_for_folding (sym);
234 if (val != error_mark_node)
236 if (val)
238 val = canonicalize_constructor_val (unshare_expr (val), sym);
239 if (val && is_gimple_min_invariant (val))
240 return val;
241 else
242 return NULL_TREE;
244 /* Variables declared 'const' without an initializer
245 have zero as the initializer if they may not be
246 overridden at link or run time. */
247 if (!val
248 && is_gimple_reg_type (TREE_TYPE (sym)))
249 return build_zero_cst (TREE_TYPE (sym));
252 return NULL_TREE;
257 /* Subroutine of fold_stmt. We perform several simplifications of the
258 memory reference tree EXPR and make sure to re-gimplify them properly
259 after propagation of constant addresses. IS_LHS is true if the
260 reference is supposed to be an lvalue. */
262 static tree
263 maybe_fold_reference (tree expr, bool is_lhs)
265 tree result;
267 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
268 || TREE_CODE (expr) == REALPART_EXPR
269 || TREE_CODE (expr) == IMAGPART_EXPR)
270 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
271 return fold_unary_loc (EXPR_LOCATION (expr),
272 TREE_CODE (expr),
273 TREE_TYPE (expr),
274 TREE_OPERAND (expr, 0));
275 else if (TREE_CODE (expr) == BIT_FIELD_REF
276 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
277 return fold_ternary_loc (EXPR_LOCATION (expr),
278 TREE_CODE (expr),
279 TREE_TYPE (expr),
280 TREE_OPERAND (expr, 0),
281 TREE_OPERAND (expr, 1),
282 TREE_OPERAND (expr, 2));
284 if (!is_lhs
285 && (result = fold_const_aggregate_ref (expr))
286 && is_gimple_min_invariant (result))
287 return result;
289 return NULL_TREE;
293 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
294 replacement rhs for the statement or NULL_TREE if no simplification
295 could be made. It is assumed that the operands have been previously
296 folded. */
298 static tree
299 fold_gimple_assign (gimple_stmt_iterator *si)
301 gimple stmt = gsi_stmt (*si);
302 enum tree_code subcode = gimple_assign_rhs_code (stmt);
303 location_t loc = gimple_location (stmt);
305 tree result = NULL_TREE;
307 switch (get_gimple_rhs_class (subcode))
309 case GIMPLE_SINGLE_RHS:
311 tree rhs = gimple_assign_rhs1 (stmt);
313 if (TREE_CLOBBER_P (rhs))
314 return NULL_TREE;
316 if (REFERENCE_CLASS_P (rhs))
317 return maybe_fold_reference (rhs, false);
319 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
321 tree val = OBJ_TYPE_REF_EXPR (rhs);
322 if (is_gimple_min_invariant (val))
323 return val;
324 else if (flag_devirtualize && virtual_method_call_p (rhs))
326 bool final;
327 vec <cgraph_node *>targets
328 = possible_polymorphic_call_targets (rhs, stmt, &final);
329 if (final && targets.length () <= 1 && dbg_cnt (devirt))
331 if (dump_enabled_p ())
333 location_t loc = gimple_location_safe (stmt);
334 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
335 "resolving virtual function address "
336 "reference to function %s\n",
337 targets.length () == 1
338 ? targets[0]->name ()
339 : "NULL");
341 if (targets.length () == 1)
343 val = fold_convert (TREE_TYPE (val),
344 build_fold_addr_expr_loc
345 (loc, targets[0]->decl));
346 STRIP_USELESS_TYPE_CONVERSION (val);
348 else
349 /* We can not use __builtin_unreachable here because it
350 can not have address taken. */
351 val = build_int_cst (TREE_TYPE (val), 0);
352 return val;
357 else if (TREE_CODE (rhs) == ADDR_EXPR)
359 tree ref = TREE_OPERAND (rhs, 0);
360 tree tem = maybe_fold_reference (ref, true);
361 if (tem
362 && TREE_CODE (tem) == MEM_REF
363 && integer_zerop (TREE_OPERAND (tem, 1)))
364 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
365 else if (tem)
366 result = fold_convert (TREE_TYPE (rhs),
367 build_fold_addr_expr_loc (loc, tem));
368 else if (TREE_CODE (ref) == MEM_REF
369 && integer_zerop (TREE_OPERAND (ref, 1)))
370 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
373 else if (TREE_CODE (rhs) == CONSTRUCTOR
374 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
375 && (CONSTRUCTOR_NELTS (rhs)
376 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
378 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
379 unsigned i;
380 tree val;
382 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
383 if (TREE_CODE (val) != INTEGER_CST
384 && TREE_CODE (val) != REAL_CST
385 && TREE_CODE (val) != FIXED_CST)
386 return NULL_TREE;
388 return build_vector_from_ctor (TREE_TYPE (rhs),
389 CONSTRUCTOR_ELTS (rhs));
392 else if (DECL_P (rhs))
393 return get_symbol_constant_value (rhs);
395 /* If we couldn't fold the RHS, hand over to the generic
396 fold routines. */
397 if (result == NULL_TREE)
398 result = fold (rhs);
400 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
401 that may have been added by fold, and "useless" type
402 conversions that might now be apparent due to propagation. */
403 STRIP_USELESS_TYPE_CONVERSION (result);
405 if (result != rhs && valid_gimple_rhs_p (result))
406 return result;
408 return NULL_TREE;
410 break;
412 case GIMPLE_UNARY_RHS:
413 break;
415 case GIMPLE_BINARY_RHS:
416 /* Try to canonicalize for boolean-typed X the comparisons
417 X == 0, X == 1, X != 0, and X != 1. */
418 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
419 || gimple_assign_rhs_code (stmt) == NE_EXPR)
421 tree lhs = gimple_assign_lhs (stmt);
422 tree op1 = gimple_assign_rhs1 (stmt);
423 tree op2 = gimple_assign_rhs2 (stmt);
424 tree type = TREE_TYPE (op1);
426 /* Check whether the comparison operands are of the same boolean
427 type as the result type is.
428 Check that second operand is an integer-constant with value
429 one or zero. */
430 if (TREE_CODE (op2) == INTEGER_CST
431 && (integer_zerop (op2) || integer_onep (op2))
432 && useless_type_conversion_p (TREE_TYPE (lhs), type))
434 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
435 bool is_logical_not = false;
437 /* X == 0 and X != 1 is a logical-not.of X
438 X == 1 and X != 0 is X */
439 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
440 || (cmp_code == NE_EXPR && integer_onep (op2)))
441 is_logical_not = true;
443 if (is_logical_not == false)
444 result = op1;
445 /* Only for one-bit precision typed X the transformation
446 !X -> ~X is valied. */
447 else if (TYPE_PRECISION (type) == 1)
448 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
449 type, op1);
450 /* Otherwise we use !X -> X ^ 1. */
451 else
452 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
453 type, op1, build_int_cst (type, 1));
458 if (!result)
459 result = fold_binary_loc (loc, subcode,
460 TREE_TYPE (gimple_assign_lhs (stmt)),
461 gimple_assign_rhs1 (stmt),
462 gimple_assign_rhs2 (stmt));
464 if (result)
466 STRIP_USELESS_TYPE_CONVERSION (result);
467 if (valid_gimple_rhs_p (result))
468 return result;
470 break;
472 case GIMPLE_TERNARY_RHS:
473 /* Try to fold a conditional expression. */
474 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
476 tree op0 = gimple_assign_rhs1 (stmt);
477 tree tem;
478 bool set = false;
479 location_t cond_loc = gimple_location (stmt);
481 if (COMPARISON_CLASS_P (op0))
483 fold_defer_overflow_warnings ();
484 tem = fold_binary_loc (cond_loc,
485 TREE_CODE (op0), TREE_TYPE (op0),
486 TREE_OPERAND (op0, 0),
487 TREE_OPERAND (op0, 1));
488 /* This is actually a conditional expression, not a GIMPLE
489 conditional statement, however, the valid_gimple_rhs_p
490 test still applies. */
491 set = (tem && is_gimple_condexpr (tem)
492 && valid_gimple_rhs_p (tem));
493 fold_undefer_overflow_warnings (set, stmt, 0);
495 else if (is_gimple_min_invariant (op0))
497 tem = op0;
498 set = true;
500 else
501 return NULL_TREE;
503 if (set)
504 result = fold_build3_loc (cond_loc, COND_EXPR,
505 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
506 gimple_assign_rhs2 (stmt),
507 gimple_assign_rhs3 (stmt));
510 if (!result)
511 result = fold_ternary_loc (loc, subcode,
512 TREE_TYPE (gimple_assign_lhs (stmt)),
513 gimple_assign_rhs1 (stmt),
514 gimple_assign_rhs2 (stmt),
515 gimple_assign_rhs3 (stmt));
517 if (result)
519 STRIP_USELESS_TYPE_CONVERSION (result);
520 if (valid_gimple_rhs_p (result))
521 return result;
523 break;
525 case GIMPLE_INVALID_RHS:
526 gcc_unreachable ();
529 return NULL_TREE;
532 /* Attempt to fold a conditional statement. Return true if any changes were
533 made. We only attempt to fold the condition expression, and do not perform
534 any transformation that would require alteration of the cfg. It is
535 assumed that the operands have been previously folded. */
537 static bool
538 fold_gimple_cond (gcond *stmt)
540 tree result = fold_binary_loc (gimple_location (stmt),
541 gimple_cond_code (stmt),
542 boolean_type_node,
543 gimple_cond_lhs (stmt),
544 gimple_cond_rhs (stmt));
546 if (result)
548 STRIP_USELESS_TYPE_CONVERSION (result);
549 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
551 gimple_cond_set_condition_from_tree (stmt, result);
552 return true;
556 return false;
560 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
561 adjusting the replacement stmts location and virtual operands.
562 If the statement has a lhs the last stmt in the sequence is expected
563 to assign to that lhs. */
565 static void
566 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
568 gimple stmt = gsi_stmt (*si_p);
570 if (gimple_has_location (stmt))
571 annotate_all_with_location (stmts, gimple_location (stmt));
573 /* First iterate over the replacement statements backward, assigning
574 virtual operands to their defining statements. */
575 gimple laststore = NULL;
576 for (gimple_stmt_iterator i = gsi_last (stmts);
577 !gsi_end_p (i); gsi_prev (&i))
579 gimple new_stmt = gsi_stmt (i);
580 if ((gimple_assign_single_p (new_stmt)
581 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
582 || (is_gimple_call (new_stmt)
583 && (gimple_call_flags (new_stmt)
584 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
586 tree vdef;
587 if (!laststore)
588 vdef = gimple_vdef (stmt);
589 else
590 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
591 gimple_set_vdef (new_stmt, vdef);
592 if (vdef && TREE_CODE (vdef) == SSA_NAME)
593 SSA_NAME_DEF_STMT (vdef) = new_stmt;
594 laststore = new_stmt;
598 /* Second iterate over the statements forward, assigning virtual
599 operands to their uses. */
600 tree reaching_vuse = gimple_vuse (stmt);
601 for (gimple_stmt_iterator i = gsi_start (stmts);
602 !gsi_end_p (i); gsi_next (&i))
604 gimple new_stmt = gsi_stmt (i);
605 /* If the new statement possibly has a VUSE, update it with exact SSA
606 name we know will reach this one. */
607 if (gimple_has_mem_ops (new_stmt))
608 gimple_set_vuse (new_stmt, reaching_vuse);
609 gimple_set_modified (new_stmt, true);
610 if (gimple_vdef (new_stmt))
611 reaching_vuse = gimple_vdef (new_stmt);
614 /* If the new sequence does not do a store release the virtual
615 definition of the original statement. */
616 if (reaching_vuse
617 && reaching_vuse == gimple_vuse (stmt))
619 tree vdef = gimple_vdef (stmt);
620 if (vdef
621 && TREE_CODE (vdef) == SSA_NAME)
623 unlink_stmt_vdef (stmt);
624 release_ssa_name (vdef);
628 /* Finally replace the original statement with the sequence. */
629 gsi_replace_with_seq (si_p, stmts, false);
632 /* Convert EXPR into a GIMPLE value suitable for substitution on the
633 RHS of an assignment. Insert the necessary statements before
634 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
635 is replaced. If the call is expected to produces a result, then it
636 is replaced by an assignment of the new RHS to the result variable.
637 If the result is to be ignored, then the call is replaced by a
638 GIMPLE_NOP. A proper VDEF chain is retained by making the first
639 VUSE and the last VDEF of the whole sequence be the same as the replaced
640 statement and using new SSA names for stores in between. */
642 void
643 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
645 tree lhs;
646 gimple stmt, new_stmt;
647 gimple_stmt_iterator i;
648 gimple_seq stmts = NULL;
650 stmt = gsi_stmt (*si_p);
652 gcc_assert (is_gimple_call (stmt));
654 push_gimplify_context (gimple_in_ssa_p (cfun));
656 lhs = gimple_call_lhs (stmt);
657 if (lhs == NULL_TREE)
659 gimplify_and_add (expr, &stmts);
660 /* We can end up with folding a memcpy of an empty class assignment
661 which gets optimized away by C++ gimplification. */
662 if (gimple_seq_empty_p (stmts))
664 pop_gimplify_context (NULL);
665 if (gimple_in_ssa_p (cfun))
667 unlink_stmt_vdef (stmt);
668 release_defs (stmt);
670 gsi_replace (si_p, gimple_build_nop (), true);
671 return;
674 else
676 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
677 new_stmt = gimple_build_assign (lhs, tmp);
678 i = gsi_last (stmts);
679 gsi_insert_after_without_update (&i, new_stmt,
680 GSI_CONTINUE_LINKING);
683 pop_gimplify_context (NULL);
685 gsi_replace_with_seq_vops (si_p, stmts);
689 /* Replace the call at *GSI with the gimple value VAL. */
691 static void
692 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
694 gimple stmt = gsi_stmt (*gsi);
695 tree lhs = gimple_call_lhs (stmt);
696 gimple repl;
697 if (lhs)
699 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
700 val = fold_convert (TREE_TYPE (lhs), val);
701 repl = gimple_build_assign (lhs, val);
703 else
704 repl = gimple_build_nop ();
705 tree vdef = gimple_vdef (stmt);
706 if (vdef && TREE_CODE (vdef) == SSA_NAME)
708 unlink_stmt_vdef (stmt);
709 release_ssa_name (vdef);
711 gsi_replace (gsi, repl, true);
714 /* Replace the call at *GSI with the new call REPL and fold that
715 again. */
717 static void
718 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
720 gimple stmt = gsi_stmt (*gsi);
721 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
722 gimple_set_location (repl, gimple_location (stmt));
723 if (gimple_vdef (stmt)
724 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
726 gimple_set_vdef (repl, gimple_vdef (stmt));
727 gimple_set_vuse (repl, gimple_vuse (stmt));
728 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
730 gsi_replace (gsi, repl, true);
731 fold_stmt (gsi);
734 /* Return true if VAR is a VAR_DECL or a component thereof. */
736 static bool
737 var_decl_component_p (tree var)
739 tree inner = var;
740 while (handled_component_p (inner))
741 inner = TREE_OPERAND (inner, 0);
742 return SSA_VAR_P (inner);
745 /* Fold function call to builtin mem{{,p}cpy,move}. Return
746 false if no simplification can be made.
747 If ENDP is 0, return DEST (like memcpy).
748 If ENDP is 1, return DEST+LEN (like mempcpy).
749 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
750 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
751 (memmove). */
753 static bool
754 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
755 tree dest, tree src, int endp)
757 gimple stmt = gsi_stmt (*gsi);
758 tree lhs = gimple_call_lhs (stmt);
759 tree len = gimple_call_arg (stmt, 2);
760 tree destvar, srcvar;
761 location_t loc = gimple_location (stmt);
763 /* If the LEN parameter is zero, return DEST. */
764 if (integer_zerop (len))
766 gimple repl;
767 if (gimple_call_lhs (stmt))
768 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
769 else
770 repl = gimple_build_nop ();
771 tree vdef = gimple_vdef (stmt);
772 if (vdef && TREE_CODE (vdef) == SSA_NAME)
774 unlink_stmt_vdef (stmt);
775 release_ssa_name (vdef);
777 gsi_replace (gsi, repl, true);
778 return true;
781 /* If SRC and DEST are the same (and not volatile), return
782 DEST{,+LEN,+LEN-1}. */
783 if (operand_equal_p (src, dest, 0))
785 unlink_stmt_vdef (stmt);
786 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
787 release_ssa_name (gimple_vdef (stmt));
788 if (!lhs)
790 gsi_replace (gsi, gimple_build_nop (), true);
791 return true;
793 goto done;
795 else
797 tree srctype, desttype;
798 unsigned int src_align, dest_align;
799 tree off0;
801 /* Build accesses at offset zero with a ref-all character type. */
802 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
803 ptr_mode, true), 0);
805 /* If we can perform the copy efficiently with first doing all loads
806 and then all stores inline it that way. Currently efficiently
807 means that we can load all the memory into a single integer
808 register which is what MOVE_MAX gives us. */
809 src_align = get_pointer_alignment (src);
810 dest_align = get_pointer_alignment (dest);
811 if (tree_fits_uhwi_p (len)
812 && compare_tree_int (len, MOVE_MAX) <= 0
813 /* ??? Don't transform copies from strings with known length this
814 confuses the tree-ssa-strlen.c. This doesn't handle
815 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
816 reason. */
817 && !c_strlen (src, 2))
819 unsigned ilen = tree_to_uhwi (len);
820 if (exact_log2 (ilen) != -1)
822 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
823 if (type
824 && TYPE_MODE (type) != BLKmode
825 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
826 == ilen * 8)
827 /* If the destination pointer is not aligned we must be able
828 to emit an unaligned store. */
829 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
830 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
832 tree srctype = type;
833 tree desttype = type;
834 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
835 srctype = build_aligned_type (type, src_align);
836 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
837 tree tem = fold_const_aggregate_ref (srcmem);
838 if (tem)
839 srcmem = tem;
840 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
841 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
842 src_align))
843 srcmem = NULL_TREE;
844 if (srcmem)
846 gimple new_stmt;
847 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
849 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
850 if (gimple_in_ssa_p (cfun))
851 srcmem = make_ssa_name (TREE_TYPE (srcmem),
852 new_stmt);
853 else
854 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
855 gimple_assign_set_lhs (new_stmt, srcmem);
856 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
857 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
859 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
860 desttype = build_aligned_type (type, dest_align);
861 new_stmt
862 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
863 dest, off0),
864 srcmem);
865 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
866 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
867 if (gimple_vdef (new_stmt)
868 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
869 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
870 if (!lhs)
872 gsi_replace (gsi, new_stmt, true);
873 return true;
875 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
876 goto done;
882 if (endp == 3)
884 /* Both DEST and SRC must be pointer types.
885 ??? This is what old code did. Is the testing for pointer types
886 really mandatory?
888 If either SRC is readonly or length is 1, we can use memcpy. */
889 if (!dest_align || !src_align)
890 return false;
891 if (readonly_data_expr (src)
892 || (tree_fits_uhwi_p (len)
893 && (MIN (src_align, dest_align) / BITS_PER_UNIT
894 >= tree_to_uhwi (len))))
896 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
897 if (!fn)
898 return false;
899 gimple_call_set_fndecl (stmt, fn);
900 gimple_call_set_arg (stmt, 0, dest);
901 gimple_call_set_arg (stmt, 1, src);
902 fold_stmt (gsi);
903 return true;
906 /* If *src and *dest can't overlap, optimize into memcpy as well. */
907 if (TREE_CODE (src) == ADDR_EXPR
908 && TREE_CODE (dest) == ADDR_EXPR)
910 tree src_base, dest_base, fn;
911 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
912 HOST_WIDE_INT size = -1;
913 HOST_WIDE_INT maxsize = -1;
915 srcvar = TREE_OPERAND (src, 0);
916 src_base = get_ref_base_and_extent (srcvar, &src_offset,
917 &size, &maxsize);
918 destvar = TREE_OPERAND (dest, 0);
919 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
920 &size, &maxsize);
921 if (tree_fits_uhwi_p (len))
922 maxsize = tree_to_uhwi (len);
923 else
924 maxsize = -1;
925 src_offset /= BITS_PER_UNIT;
926 dest_offset /= BITS_PER_UNIT;
927 if (SSA_VAR_P (src_base)
928 && SSA_VAR_P (dest_base))
930 if (operand_equal_p (src_base, dest_base, 0)
931 && ranges_overlap_p (src_offset, maxsize,
932 dest_offset, maxsize))
933 return false;
935 else if (TREE_CODE (src_base) == MEM_REF
936 && TREE_CODE (dest_base) == MEM_REF)
938 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
939 TREE_OPERAND (dest_base, 0), 0))
940 return false;
941 offset_int off = mem_ref_offset (src_base) + src_offset;
942 if (!wi::fits_shwi_p (off))
943 return false;
944 src_offset = off.to_shwi ();
946 off = mem_ref_offset (dest_base) + dest_offset;
947 if (!wi::fits_shwi_p (off))
948 return false;
949 dest_offset = off.to_shwi ();
950 if (ranges_overlap_p (src_offset, maxsize,
951 dest_offset, maxsize))
952 return false;
954 else
955 return false;
957 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
958 if (!fn)
959 return false;
960 gimple_call_set_fndecl (stmt, fn);
961 gimple_call_set_arg (stmt, 0, dest);
962 gimple_call_set_arg (stmt, 1, src);
963 fold_stmt (gsi);
964 return true;
967 /* If the destination and source do not alias optimize into
968 memcpy as well. */
969 if ((is_gimple_min_invariant (dest)
970 || TREE_CODE (dest) == SSA_NAME)
971 && (is_gimple_min_invariant (src)
972 || TREE_CODE (src) == SSA_NAME))
974 ao_ref destr, srcr;
975 ao_ref_init_from_ptr_and_size (&destr, dest, len);
976 ao_ref_init_from_ptr_and_size (&srcr, src, len);
977 if (!refs_may_alias_p_1 (&destr, &srcr, false))
979 tree fn;
980 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
981 if (!fn)
982 return false;
983 gimple_call_set_fndecl (stmt, fn);
984 gimple_call_set_arg (stmt, 0, dest);
985 gimple_call_set_arg (stmt, 1, src);
986 fold_stmt (gsi);
987 return true;
991 return false;
994 if (!tree_fits_shwi_p (len))
995 return false;
996 /* FIXME:
997 This logic lose for arguments like (type *)malloc (sizeof (type)),
998 since we strip the casts of up to VOID return value from malloc.
999 Perhaps we ought to inherit type from non-VOID argument here? */
1000 STRIP_NOPS (src);
1001 STRIP_NOPS (dest);
1002 if (!POINTER_TYPE_P (TREE_TYPE (src))
1003 || !POINTER_TYPE_P (TREE_TYPE (dest)))
1004 return false;
1005 /* In the following try to find a type that is most natural to be
1006 used for the memcpy source and destination and that allows
1007 the most optimization when memcpy is turned into a plain assignment
1008 using that type. In theory we could always use a char[len] type
1009 but that only gains us that the destination and source possibly
1010 no longer will have their address taken. */
1011 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
1012 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
1014 tree tem = TREE_OPERAND (src, 0);
1015 STRIP_NOPS (tem);
1016 if (tem != TREE_OPERAND (src, 0))
1017 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
1019 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
1021 tree tem = TREE_OPERAND (dest, 0);
1022 STRIP_NOPS (tem);
1023 if (tem != TREE_OPERAND (dest, 0))
1024 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
1026 srctype = TREE_TYPE (TREE_TYPE (src));
1027 if (TREE_CODE (srctype) == ARRAY_TYPE
1028 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1030 srctype = TREE_TYPE (srctype);
1031 STRIP_NOPS (src);
1032 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
1034 desttype = TREE_TYPE (TREE_TYPE (dest));
1035 if (TREE_CODE (desttype) == ARRAY_TYPE
1036 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1038 desttype = TREE_TYPE (desttype);
1039 STRIP_NOPS (dest);
1040 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
1042 if (TREE_ADDRESSABLE (srctype)
1043 || TREE_ADDRESSABLE (desttype))
1044 return false;
1046 /* Make sure we are not copying using a floating-point mode or
1047 a type whose size possibly does not match its precision. */
1048 if (FLOAT_MODE_P (TYPE_MODE (desttype))
1049 || TREE_CODE (desttype) == BOOLEAN_TYPE
1050 || TREE_CODE (desttype) == ENUMERAL_TYPE)
1051 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
1052 if (FLOAT_MODE_P (TYPE_MODE (srctype))
1053 || TREE_CODE (srctype) == BOOLEAN_TYPE
1054 || TREE_CODE (srctype) == ENUMERAL_TYPE)
1055 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
1056 if (!srctype)
1057 srctype = desttype;
1058 if (!desttype)
1059 desttype = srctype;
1060 if (!srctype)
1061 return false;
1063 src_align = get_pointer_alignment (src);
1064 dest_align = get_pointer_alignment (dest);
1065 if (dest_align < TYPE_ALIGN (desttype)
1066 || src_align < TYPE_ALIGN (srctype))
1067 return false;
1069 destvar = dest;
1070 STRIP_NOPS (destvar);
1071 if (TREE_CODE (destvar) == ADDR_EXPR
1072 && var_decl_component_p (TREE_OPERAND (destvar, 0))
1073 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1074 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
1075 else
1076 destvar = NULL_TREE;
1078 srcvar = src;
1079 STRIP_NOPS (srcvar);
1080 if (TREE_CODE (srcvar) == ADDR_EXPR
1081 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1082 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1084 if (!destvar
1085 || src_align >= TYPE_ALIGN (desttype))
1086 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1087 srcvar, off0);
1088 else if (!STRICT_ALIGNMENT)
1090 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1091 src_align);
1092 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1094 else
1095 srcvar = NULL_TREE;
1097 else
1098 srcvar = NULL_TREE;
1100 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1101 return false;
1103 if (srcvar == NULL_TREE)
1105 STRIP_NOPS (src);
1106 if (src_align >= TYPE_ALIGN (desttype))
1107 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1108 else
1110 if (STRICT_ALIGNMENT)
1111 return false;
1112 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1113 src_align);
1114 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1117 else if (destvar == NULL_TREE)
1119 STRIP_NOPS (dest);
1120 if (dest_align >= TYPE_ALIGN (srctype))
1121 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1122 else
1124 if (STRICT_ALIGNMENT)
1125 return false;
1126 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1127 dest_align);
1128 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1132 gimple new_stmt;
1133 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1135 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1136 if (gimple_in_ssa_p (cfun))
1137 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1138 else
1139 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
1140 gimple_assign_set_lhs (new_stmt, srcvar);
1141 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1142 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1144 new_stmt = gimple_build_assign (destvar, srcvar);
1145 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1146 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1147 if (gimple_vdef (new_stmt)
1148 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1149 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1150 if (!lhs)
1152 gsi_replace (gsi, new_stmt, true);
1153 return true;
1155 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1158 done:
1159 if (endp == 0 || endp == 3)
1160 len = NULL_TREE;
1161 else if (endp == 2)
1162 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
1163 ssize_int (1));
1164 if (endp == 2 || endp == 1)
1165 dest = fold_build_pointer_plus_loc (loc, dest, len);
1167 dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
1168 GSI_SAME_STMT);
1169 gimple repl = gimple_build_assign (lhs, dest);
1170 gsi_replace (gsi, repl, true);
1171 return true;
1174 /* Fold function call to builtin memset or bzero at *GSI setting the
1175 memory of size LEN to VAL. Return whether a simplification was made. */
1177 static bool
1178 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1180 gimple stmt = gsi_stmt (*gsi);
1181 tree etype;
1182 unsigned HOST_WIDE_INT length, cval;
1184 /* If the LEN parameter is zero, return DEST. */
1185 if (integer_zerop (len))
1187 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1188 return true;
1191 if (! tree_fits_uhwi_p (len))
1192 return false;
1194 if (TREE_CODE (c) != INTEGER_CST)
1195 return false;
1197 tree dest = gimple_call_arg (stmt, 0);
1198 tree var = dest;
1199 if (TREE_CODE (var) != ADDR_EXPR)
1200 return false;
1202 var = TREE_OPERAND (var, 0);
1203 if (TREE_THIS_VOLATILE (var))
1204 return false;
1206 etype = TREE_TYPE (var);
1207 if (TREE_CODE (etype) == ARRAY_TYPE)
1208 etype = TREE_TYPE (etype);
1210 if (!INTEGRAL_TYPE_P (etype)
1211 && !POINTER_TYPE_P (etype))
1212 return NULL_TREE;
1214 if (! var_decl_component_p (var))
1215 return NULL_TREE;
1217 length = tree_to_uhwi (len);
1218 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1219 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1220 return NULL_TREE;
1222 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1223 return NULL_TREE;
1225 if (integer_zerop (c))
1226 cval = 0;
1227 else
1229 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1230 return NULL_TREE;
1232 cval = TREE_INT_CST_LOW (c);
1233 cval &= 0xff;
1234 cval |= cval << 8;
1235 cval |= cval << 16;
1236 cval |= (cval << 31) << 1;
1239 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1240 gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1241 gimple_set_vuse (store, gimple_vuse (stmt));
1242 tree vdef = gimple_vdef (stmt);
1243 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1245 gimple_set_vdef (store, gimple_vdef (stmt));
1246 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1248 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1249 if (gimple_call_lhs (stmt))
1251 gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1252 gsi_replace (gsi, asgn, true);
1254 else
1256 gimple_stmt_iterator gsi2 = *gsi;
1257 gsi_prev (gsi);
1258 gsi_remove (&gsi2, true);
1261 return true;
1265 /* Return the string length, maximum string length or maximum value of
1266 ARG in LENGTH.
1267 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1268 is not NULL and, for TYPE == 0, its value is not equal to the length
1269 we determine or if we are unable to determine the length or value,
1270 return false. VISITED is a bitmap of visited variables.
1271 TYPE is 0 if string length should be returned, 1 for maximum string
1272 length and 2 for maximum value ARG can have. */
1274 static bool
1275 get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
1277 tree var, val;
1278 gimple def_stmt;
1280 if (TREE_CODE (arg) != SSA_NAME)
1282 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1283 if (TREE_CODE (arg) == ADDR_EXPR
1284 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1285 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1287 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1288 if (TREE_CODE (aop0) == INDIRECT_REF
1289 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1290 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1291 length, visited, type);
1294 if (type == 2)
1296 val = arg;
1297 if (TREE_CODE (val) != INTEGER_CST
1298 || tree_int_cst_sgn (val) < 0)
1299 return false;
1301 else
1302 val = c_strlen (arg, 1);
1303 if (!val)
1304 return false;
1306 if (*length)
1308 if (type > 0)
1310 if (TREE_CODE (*length) != INTEGER_CST
1311 || TREE_CODE (val) != INTEGER_CST)
1312 return false;
1314 if (tree_int_cst_lt (*length, val))
1315 *length = val;
1316 return true;
1318 else if (simple_cst_equal (val, *length) != 1)
1319 return false;
1322 *length = val;
1323 return true;
1326 /* If ARG is registered for SSA update we cannot look at its defining
1327 statement. */
1328 if (name_registered_for_update_p (arg))
1329 return false;
1331 /* If we were already here, break the infinite cycle. */
1332 if (!*visited)
1333 *visited = BITMAP_ALLOC (NULL);
1334 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1335 return true;
1337 var = arg;
1338 def_stmt = SSA_NAME_DEF_STMT (var);
1340 switch (gimple_code (def_stmt))
1342 case GIMPLE_ASSIGN:
1343 /* The RHS of the statement defining VAR must either have a
1344 constant length or come from another SSA_NAME with a constant
1345 length. */
1346 if (gimple_assign_single_p (def_stmt)
1347 || gimple_assign_unary_nop_p (def_stmt))
1349 tree rhs = gimple_assign_rhs1 (def_stmt);
1350 return get_maxval_strlen (rhs, length, visited, type);
1352 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1354 tree op2 = gimple_assign_rhs2 (def_stmt);
1355 tree op3 = gimple_assign_rhs3 (def_stmt);
1356 return get_maxval_strlen (op2, length, visited, type)
1357 && get_maxval_strlen (op3, length, visited, type);
1359 return false;
1361 case GIMPLE_PHI:
1363 /* All the arguments of the PHI node must have the same constant
1364 length. */
1365 unsigned i;
1367 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1369 tree arg = gimple_phi_arg (def_stmt, i)->def;
1371 /* If this PHI has itself as an argument, we cannot
1372 determine the string length of this argument. However,
1373 if we can find a constant string length for the other
1374 PHI args then we can still be sure that this is a
1375 constant string length. So be optimistic and just
1376 continue with the next argument. */
1377 if (arg == gimple_phi_result (def_stmt))
1378 continue;
1380 if (!get_maxval_strlen (arg, length, visited, type))
1381 return false;
1384 return true;
1386 default:
1387 return false;
1391 tree
1392 get_maxval_strlen (tree arg, int type)
1394 bitmap visited = NULL;
1395 tree len = NULL_TREE;
1396 if (!get_maxval_strlen (arg, &len, &visited, type))
1397 len = NULL_TREE;
1398 if (visited)
1399 BITMAP_FREE (visited);
1401 return len;
1405 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1406 If LEN is not NULL, it represents the length of the string to be
1407 copied. Return NULL_TREE if no simplification can be made. */
1409 static bool
1410 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1411 tree dest, tree src)
1413 location_t loc = gimple_location (gsi_stmt (*gsi));
1414 tree fn;
1416 /* If SRC and DEST are the same (and not volatile), return DEST. */
1417 if (operand_equal_p (src, dest, 0))
1419 replace_call_with_value (gsi, dest);
1420 return true;
1423 if (optimize_function_for_size_p (cfun))
1424 return false;
1426 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1427 if (!fn)
1428 return false;
1430 tree len = get_maxval_strlen (src, 0);
1431 if (!len)
1432 return false;
1434 len = fold_convert_loc (loc, size_type_node, len);
1435 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1436 len = force_gimple_operand_gsi (gsi, len, true,
1437 NULL_TREE, true, GSI_SAME_STMT);
1438 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1439 replace_call_with_call_and_fold (gsi, repl);
1440 return true;
1443 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1444 If SLEN is not NULL, it represents the length of the source string.
1445 Return NULL_TREE if no simplification can be made. */
1447 static bool
1448 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1449 tree dest, tree src, tree len)
1451 location_t loc = gimple_location (gsi_stmt (*gsi));
1452 tree fn;
1454 /* If the LEN parameter is zero, return DEST. */
1455 if (integer_zerop (len))
1457 replace_call_with_value (gsi, dest);
1458 return true;
1461 /* We can't compare slen with len as constants below if len is not a
1462 constant. */
1463 if (TREE_CODE (len) != INTEGER_CST)
1464 return false;
1466 /* Now, we must be passed a constant src ptr parameter. */
1467 tree slen = get_maxval_strlen (src, 0);
1468 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1469 return false;
1471 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1473 /* We do not support simplification of this case, though we do
1474 support it when expanding trees into RTL. */
1475 /* FIXME: generate a call to __builtin_memset. */
1476 if (tree_int_cst_lt (slen, len))
1477 return false;
1479 /* OK transform into builtin memcpy. */
1480 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1481 if (!fn)
1482 return false;
1484 len = fold_convert_loc (loc, size_type_node, len);
1485 len = force_gimple_operand_gsi (gsi, len, true,
1486 NULL_TREE, true, GSI_SAME_STMT);
1487 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1488 replace_call_with_call_and_fold (gsi, repl);
1489 return true;
1492 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1493 to the call.
1495 Return NULL_TREE if no simplification was possible, otherwise return the
1496 simplified form of the call as a tree.
1498 The simplified form may be a constant or other expression which
1499 computes the same value, but in a more efficient manner (including
1500 calls to other builtin functions).
1502 The call may contain arguments which need to be evaluated, but
1503 which are not useful to determine the result of the call. In
1504 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1505 COMPOUND_EXPR will be an argument which must be evaluated.
1506 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1507 COMPOUND_EXPR in the chain will contain the tree for the simplified
1508 form of the builtin function call. */
1510 static bool
1511 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1513 gimple stmt = gsi_stmt (*gsi);
1514 location_t loc = gimple_location (stmt);
1516 const char *p = c_getstr (src);
1518 /* If the string length is zero, return the dst parameter. */
1519 if (p && *p == '\0')
1521 replace_call_with_value (gsi, dst);
1522 return true;
1525 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1526 return false;
1528 /* See if we can store by pieces into (dst + strlen(dst)). */
1529 tree newdst;
1530 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1531 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1533 if (!strlen_fn || !memcpy_fn)
1534 return false;
1536 /* If the length of the source string isn't computable don't
1537 split strcat into strlen and memcpy. */
1538 tree len = get_maxval_strlen (src, 0);
1539 if (! len)
1540 return false;
1542 /* Create strlen (dst). */
1543 gimple_seq stmts = NULL, stmts2;
1544 gimple repl = gimple_build_call (strlen_fn, 1, dst);
1545 gimple_set_location (repl, loc);
1546 if (gimple_in_ssa_p (cfun))
1547 newdst = make_ssa_name (size_type_node);
1548 else
1549 newdst = create_tmp_reg (size_type_node);
1550 gimple_call_set_lhs (repl, newdst);
1551 gimple_seq_add_stmt_without_update (&stmts, repl);
1553 /* Create (dst p+ strlen (dst)). */
1554 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1555 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1556 gimple_seq_add_seq_without_update (&stmts, stmts2);
1558 len = fold_convert_loc (loc, size_type_node, len);
1559 len = size_binop_loc (loc, PLUS_EXPR, len,
1560 build_int_cst (size_type_node, 1));
1561 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1562 gimple_seq_add_seq_without_update (&stmts, stmts2);
1564 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1565 gimple_seq_add_stmt_without_update (&stmts, repl);
1566 if (gimple_call_lhs (stmt))
1568 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1569 gimple_seq_add_stmt_without_update (&stmts, repl);
1570 gsi_replace_with_seq_vops (gsi, stmts);
1571 /* gsi now points at the assignment to the lhs, get a
1572 stmt iterator to the memcpy call.
1573 ??? We can't use gsi_for_stmt as that doesn't work when the
1574 CFG isn't built yet. */
1575 gimple_stmt_iterator gsi2 = *gsi;
1576 gsi_prev (&gsi2);
1577 fold_stmt (&gsi2);
1579 else
1581 gsi_replace_with_seq_vops (gsi, stmts);
1582 fold_stmt (gsi);
1584 return true;
1587 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1588 are the arguments to the call. */
1590 static bool
1591 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1593 gimple stmt = gsi_stmt (*gsi);
1594 tree dest = gimple_call_arg (stmt, 0);
1595 tree src = gimple_call_arg (stmt, 1);
1596 tree size = gimple_call_arg (stmt, 2);
1597 tree fn;
1598 const char *p;
1601 p = c_getstr (src);
1602 /* If the SRC parameter is "", return DEST. */
1603 if (p && *p == '\0')
1605 replace_call_with_value (gsi, dest);
1606 return true;
1609 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1610 return false;
1612 /* If __builtin_strcat_chk is used, assume strcat is available. */
1613 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1614 if (!fn)
1615 return false;
1617 gimple repl = gimple_build_call (fn, 2, dest, src);
1618 replace_call_with_call_and_fold (gsi, repl);
1619 return true;
1622 /* Simplify a call to the strncat builtin. */
1624 static bool
1625 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1627 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1628 tree dst = gimple_call_arg (stmt, 0);
1629 tree src = gimple_call_arg (stmt, 1);
1630 tree len = gimple_call_arg (stmt, 2);
1632 const char *p = c_getstr (src);
1634 /* If the requested length is zero, or the src parameter string
1635 length is zero, return the dst parameter. */
1636 if (integer_zerop (len) || (p && *p == '\0'))
1638 replace_call_with_value (gsi, dst);
1639 return true;
1642 /* If the requested len is greater than or equal to the string
1643 length, call strcat. */
1644 if (TREE_CODE (len) == INTEGER_CST && p
1645 && compare_tree_int (len, strlen (p)) >= 0)
1647 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1649 /* If the replacement _DECL isn't initialized, don't do the
1650 transformation. */
1651 if (!fn)
1652 return false;
1654 gcall *repl = gimple_build_call (fn, 2, dst, src);
1655 replace_call_with_call_and_fold (gsi, repl);
1656 return true;
1659 return false;
1662 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1663 LEN, and SIZE. */
1665 static bool
1666 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1668 gimple stmt = gsi_stmt (*gsi);
1669 tree dest = gimple_call_arg (stmt, 0);
1670 tree src = gimple_call_arg (stmt, 1);
1671 tree len = gimple_call_arg (stmt, 2);
1672 tree size = gimple_call_arg (stmt, 3);
1673 tree fn;
1674 const char *p;
1676 p = c_getstr (src);
1677 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1678 if ((p && *p == '\0')
1679 || integer_zerop (len))
1681 replace_call_with_value (gsi, dest);
1682 return true;
1685 if (! tree_fits_uhwi_p (size))
1686 return false;
1688 if (! integer_all_onesp (size))
1690 tree src_len = c_strlen (src, 1);
1691 if (src_len
1692 && tree_fits_uhwi_p (src_len)
1693 && tree_fits_uhwi_p (len)
1694 && ! tree_int_cst_lt (len, src_len))
1696 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1697 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1698 if (!fn)
1699 return false;
1701 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1702 replace_call_with_call_and_fold (gsi, repl);
1703 return true;
1705 return false;
1708 /* If __builtin_strncat_chk is used, assume strncat is available. */
1709 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1710 if (!fn)
1711 return false;
1713 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1714 replace_call_with_call_and_fold (gsi, repl);
1715 return true;
1718 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1719 to the call. IGNORE is true if the value returned
1720 by the builtin will be ignored. UNLOCKED is true is true if this
1721 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1722 the known length of the string. Return NULL_TREE if no simplification
1723 was possible. */
1725 static bool
1726 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
1727 tree arg0, tree arg1,
1728 bool unlocked)
1730 gimple stmt = gsi_stmt (*gsi);
1732 /* If we're using an unlocked function, assume the other unlocked
1733 functions exist explicitly. */
1734 tree const fn_fputc = (unlocked
1735 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1736 : builtin_decl_implicit (BUILT_IN_FPUTC));
1737 tree const fn_fwrite = (unlocked
1738 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1739 : builtin_decl_implicit (BUILT_IN_FWRITE));
1741 /* If the return value is used, don't do the transformation. */
1742 if (gimple_call_lhs (stmt))
1743 return false;
1745 /* Get the length of the string passed to fputs. If the length
1746 can't be determined, punt. */
1747 tree len = get_maxval_strlen (arg0, 0);
1748 if (!len
1749 || TREE_CODE (len) != INTEGER_CST)
1750 return false;
1752 switch (compare_tree_int (len, 1))
1754 case -1: /* length is 0, delete the call entirely . */
1755 replace_call_with_value (gsi, integer_zero_node);
1756 return true;
1758 case 0: /* length is 1, call fputc. */
1760 const char *p = c_getstr (arg0);
1761 if (p != NULL)
1763 if (!fn_fputc)
1764 return false;
1766 gimple repl = gimple_build_call (fn_fputc, 2,
1767 build_int_cst
1768 (integer_type_node, p[0]), arg1);
1769 replace_call_with_call_and_fold (gsi, repl);
1770 return true;
1773 /* FALLTHROUGH */
1774 case 1: /* length is greater than 1, call fwrite. */
1776 /* If optimizing for size keep fputs. */
1777 if (optimize_function_for_size_p (cfun))
1778 return false;
1779 /* New argument list transforming fputs(string, stream) to
1780 fwrite(string, 1, len, stream). */
1781 if (!fn_fwrite)
1782 return false;
1784 gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
1785 size_one_node, len, arg1);
1786 replace_call_with_call_and_fold (gsi, repl);
1787 return true;
1789 default:
1790 gcc_unreachable ();
1792 return false;
1795 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1796 DEST, SRC, LEN, and SIZE are the arguments to the call.
1797 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1798 code of the builtin. If MAXLEN is not NULL, it is maximum length
1799 passed as third argument. */
1801 static bool
1802 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
1803 tree dest, tree src, tree len, tree size,
1804 enum built_in_function fcode)
1806 gimple stmt = gsi_stmt (*gsi);
1807 location_t loc = gimple_location (stmt);
1808 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1809 tree fn;
1811 /* If SRC and DEST are the same (and not volatile), return DEST
1812 (resp. DEST+LEN for __mempcpy_chk). */
1813 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1815 if (fcode != BUILT_IN_MEMPCPY_CHK)
1817 replace_call_with_value (gsi, dest);
1818 return true;
1820 else
1822 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
1823 temp = force_gimple_operand_gsi (gsi, temp,
1824 false, NULL_TREE, true,
1825 GSI_SAME_STMT);
1826 replace_call_with_value (gsi, temp);
1827 return true;
1831 if (! tree_fits_uhwi_p (size))
1832 return false;
1834 tree maxlen = get_maxval_strlen (len, 2);
1835 if (! integer_all_onesp (size))
1837 if (! tree_fits_uhwi_p (len))
1839 /* If LEN is not constant, try MAXLEN too.
1840 For MAXLEN only allow optimizing into non-_ocs function
1841 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1842 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1844 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1846 /* (void) __mempcpy_chk () can be optimized into
1847 (void) __memcpy_chk (). */
1848 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1849 if (!fn)
1850 return false;
1852 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1853 replace_call_with_call_and_fold (gsi, repl);
1854 return true;
1856 return false;
1859 else
1860 maxlen = len;
1862 if (tree_int_cst_lt (size, maxlen))
1863 return false;
1866 fn = NULL_TREE;
1867 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1868 mem{cpy,pcpy,move,set} is available. */
1869 switch (fcode)
1871 case BUILT_IN_MEMCPY_CHK:
1872 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1873 break;
1874 case BUILT_IN_MEMPCPY_CHK:
1875 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1876 break;
1877 case BUILT_IN_MEMMOVE_CHK:
1878 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1879 break;
1880 case BUILT_IN_MEMSET_CHK:
1881 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1882 break;
1883 default:
1884 break;
1887 if (!fn)
1888 return false;
1890 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1891 replace_call_with_call_and_fold (gsi, repl);
1892 return true;
1895 /* Fold a call to the __st[rp]cpy_chk builtin.
1896 DEST, SRC, and SIZE are the arguments to the call.
1897 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1898 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1899 strings passed as second argument. */
1901 static bool
1902 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1903 tree dest,
1904 tree src, tree size,
1905 enum built_in_function fcode)
1907 gimple stmt = gsi_stmt (*gsi);
1908 location_t loc = gimple_location (stmt);
1909 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1910 tree len, fn;
1912 /* If SRC and DEST are the same (and not volatile), return DEST. */
1913 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1915 replace_call_with_value (gsi, dest);
1916 return true;
1919 if (! tree_fits_uhwi_p (size))
1920 return false;
1922 tree maxlen = get_maxval_strlen (src, 1);
1923 if (! integer_all_onesp (size))
1925 len = c_strlen (src, 1);
1926 if (! len || ! tree_fits_uhwi_p (len))
1928 /* If LEN is not constant, try MAXLEN too.
1929 For MAXLEN only allow optimizing into non-_ocs function
1930 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1931 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1933 if (fcode == BUILT_IN_STPCPY_CHK)
1935 if (! ignore)
1936 return false;
1938 /* If return value of __stpcpy_chk is ignored,
1939 optimize into __strcpy_chk. */
1940 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1941 if (!fn)
1942 return false;
1944 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1945 replace_call_with_call_and_fold (gsi, repl);
1946 return true;
1949 if (! len || TREE_SIDE_EFFECTS (len))
1950 return false;
1952 /* If c_strlen returned something, but not a constant,
1953 transform __strcpy_chk into __memcpy_chk. */
1954 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1955 if (!fn)
1956 return false;
1958 len = fold_convert_loc (loc, size_type_node, len);
1959 len = size_binop_loc (loc, PLUS_EXPR, len,
1960 build_int_cst (size_type_node, 1));
1961 len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
1962 true, GSI_SAME_STMT);
1963 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1964 replace_call_with_call_and_fold (gsi, repl);
1965 return true;
1968 else
1969 maxlen = len;
1971 if (! tree_int_cst_lt (maxlen, size))
1972 return false;
1975 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1976 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1977 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1978 if (!fn)
1979 return false;
1981 gimple repl = gimple_build_call (fn, 2, dest, src);
1982 replace_call_with_call_and_fold (gsi, repl);
1983 return true;
1986 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1987 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1988 length passed as third argument. IGNORE is true if return value can be
1989 ignored. FCODE is the BUILT_IN_* code of the builtin. */
1991 static bool
1992 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1993 tree dest, tree src,
1994 tree len, tree size,
1995 enum built_in_function fcode)
1997 gimple stmt = gsi_stmt (*gsi);
1998 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1999 tree fn;
2001 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2003 /* If return value of __stpncpy_chk is ignored,
2004 optimize into __strncpy_chk. */
2005 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2006 if (fn)
2008 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
2009 replace_call_with_call_and_fold (gsi, repl);
2010 return true;
2014 if (! tree_fits_uhwi_p (size))
2015 return false;
2017 tree maxlen = get_maxval_strlen (len, 2);
2018 if (! integer_all_onesp (size))
2020 if (! tree_fits_uhwi_p (len))
2022 /* If LEN is not constant, try MAXLEN too.
2023 For MAXLEN only allow optimizing into non-_ocs function
2024 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2025 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2026 return false;
2028 else
2029 maxlen = len;
2031 if (tree_int_cst_lt (size, maxlen))
2032 return false;
2035 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2036 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2037 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2038 if (!fn)
2039 return false;
2041 gimple repl = gimple_build_call (fn, 3, dest, src, len);
2042 replace_call_with_call_and_fold (gsi, repl);
2043 return true;
2046 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2047 Return NULL_TREE if no simplification can be made. */
2049 static bool
2050 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2052 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2053 location_t loc = gimple_location (stmt);
2054 tree dest = gimple_call_arg (stmt, 0);
2055 tree src = gimple_call_arg (stmt, 1);
2056 tree fn, len, lenp1;
2058 /* If the result is unused, replace stpcpy with strcpy. */
2059 if (gimple_call_lhs (stmt) == NULL_TREE)
2061 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2062 if (!fn)
2063 return false;
2064 gimple_call_set_fndecl (stmt, fn);
2065 fold_stmt (gsi);
2066 return true;
2069 len = c_strlen (src, 1);
2070 if (!len
2071 || TREE_CODE (len) != INTEGER_CST)
2072 return false;
2074 if (optimize_function_for_size_p (cfun)
2075 /* If length is zero it's small enough. */
2076 && !integer_zerop (len))
2077 return false;
2079 /* If the source has a known length replace stpcpy with memcpy. */
2080 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2081 if (!fn)
2082 return false;
2084 gimple_seq stmts = NULL;
2085 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2086 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2087 tem, build_int_cst (size_type_node, 1));
2088 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2089 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2090 gimple_set_vuse (repl, gimple_vuse (stmt));
2091 gimple_set_vdef (repl, gimple_vdef (stmt));
2092 if (gimple_vdef (repl)
2093 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2094 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2095 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2096 /* Replace the result with dest + len. */
2097 stmts = NULL;
2098 tem = gimple_convert (&stmts, loc, sizetype, len);
2099 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2100 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2101 POINTER_PLUS_EXPR, dest, tem);
2102 gsi_replace (gsi, ret, true);
2103 /* Finally fold the memcpy call. */
2104 gimple_stmt_iterator gsi2 = *gsi;
2105 gsi_prev (&gsi2);
2106 fold_stmt (&gsi2);
2107 return true;
2110 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2111 NULL_TREE if a normal call should be emitted rather than expanding
2112 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2113 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2114 passed as second argument. */
2116 static bool
2117 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2118 enum built_in_function fcode)
2120 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2121 tree dest, size, len, fn, fmt, flag;
2122 const char *fmt_str;
2124 /* Verify the required arguments in the original call. */
2125 if (gimple_call_num_args (stmt) < 5)
2126 return false;
2128 dest = gimple_call_arg (stmt, 0);
2129 len = gimple_call_arg (stmt, 1);
2130 flag = gimple_call_arg (stmt, 2);
2131 size = gimple_call_arg (stmt, 3);
2132 fmt = gimple_call_arg (stmt, 4);
2134 if (! tree_fits_uhwi_p (size))
2135 return false;
2137 if (! integer_all_onesp (size))
2139 tree maxlen = get_maxval_strlen (len, 2);
2140 if (! tree_fits_uhwi_p (len))
2142 /* If LEN is not constant, try MAXLEN too.
2143 For MAXLEN only allow optimizing into non-_ocs function
2144 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2145 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2146 return false;
2148 else
2149 maxlen = len;
2151 if (tree_int_cst_lt (size, maxlen))
2152 return false;
2155 if (!init_target_chars ())
2156 return false;
2158 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2159 or if format doesn't contain % chars or is "%s". */
2160 if (! integer_zerop (flag))
2162 fmt_str = c_getstr (fmt);
2163 if (fmt_str == NULL)
2164 return false;
2165 if (strchr (fmt_str, target_percent) != NULL
2166 && strcmp (fmt_str, target_percent_s))
2167 return false;
2170 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2171 available. */
2172 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2173 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2174 if (!fn)
2175 return false;
2177 /* Replace the called function and the first 5 argument by 3 retaining
2178 trailing varargs. */
2179 gimple_call_set_fndecl (stmt, fn);
2180 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2181 gimple_call_set_arg (stmt, 0, dest);
2182 gimple_call_set_arg (stmt, 1, len);
2183 gimple_call_set_arg (stmt, 2, fmt);
2184 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2185 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2186 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2187 fold_stmt (gsi);
2188 return true;
2191 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2192 Return NULL_TREE if a normal call should be emitted rather than
2193 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2194 or BUILT_IN_VSPRINTF_CHK. */
2196 static bool
2197 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2198 enum built_in_function fcode)
2200 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2201 tree dest, size, len, fn, fmt, flag;
2202 const char *fmt_str;
2203 unsigned nargs = gimple_call_num_args (stmt);
2205 /* Verify the required arguments in the original call. */
2206 if (nargs < 4)
2207 return false;
2208 dest = gimple_call_arg (stmt, 0);
2209 flag = gimple_call_arg (stmt, 1);
2210 size = gimple_call_arg (stmt, 2);
2211 fmt = gimple_call_arg (stmt, 3);
2213 if (! tree_fits_uhwi_p (size))
2214 return false;
2216 len = NULL_TREE;
2218 if (!init_target_chars ())
2219 return false;
2221 /* Check whether the format is a literal string constant. */
2222 fmt_str = c_getstr (fmt);
2223 if (fmt_str != NULL)
2225 /* If the format doesn't contain % args or %%, we know the size. */
2226 if (strchr (fmt_str, target_percent) == 0)
2228 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2229 len = build_int_cstu (size_type_node, strlen (fmt_str));
2231 /* If the format is "%s" and first ... argument is a string literal,
2232 we know the size too. */
2233 else if (fcode == BUILT_IN_SPRINTF_CHK
2234 && strcmp (fmt_str, target_percent_s) == 0)
2236 tree arg;
2238 if (nargs == 5)
2240 arg = gimple_call_arg (stmt, 4);
2241 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2243 len = c_strlen (arg, 1);
2244 if (! len || ! tree_fits_uhwi_p (len))
2245 len = NULL_TREE;
2251 if (! integer_all_onesp (size))
2253 if (! len || ! tree_int_cst_lt (len, size))
2254 return false;
2257 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2258 or if format doesn't contain % chars or is "%s". */
2259 if (! integer_zerop (flag))
2261 if (fmt_str == NULL)
2262 return false;
2263 if (strchr (fmt_str, target_percent) != NULL
2264 && strcmp (fmt_str, target_percent_s))
2265 return false;
2268 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2269 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2270 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2271 if (!fn)
2272 return false;
2274 /* Replace the called function and the first 4 argument by 2 retaining
2275 trailing varargs. */
2276 gimple_call_set_fndecl (stmt, fn);
2277 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2278 gimple_call_set_arg (stmt, 0, dest);
2279 gimple_call_set_arg (stmt, 1, fmt);
2280 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2281 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2282 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2283 fold_stmt (gsi);
2284 return true;
2287 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2288 ORIG may be null if this is a 2-argument call. We don't attempt to
2289 simplify calls with more than 3 arguments.
2291 Return NULL_TREE if no simplification was possible, otherwise return the
2292 simplified form of the call as a tree. If IGNORED is true, it means that
2293 the caller does not use the returned value of the function. */
2295 static bool
2296 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2298 gimple stmt = gsi_stmt (*gsi);
2299 tree dest = gimple_call_arg (stmt, 0);
2300 tree fmt = gimple_call_arg (stmt, 1);
2301 tree orig = NULL_TREE;
2302 const char *fmt_str = NULL;
2304 /* Verify the required arguments in the original call. We deal with two
2305 types of sprintf() calls: 'sprintf (str, fmt)' and
2306 'sprintf (dest, "%s", orig)'. */
2307 if (gimple_call_num_args (stmt) > 3)
2308 return false;
2310 if (gimple_call_num_args (stmt) == 3)
2311 orig = gimple_call_arg (stmt, 2);
2313 /* Check whether the format is a literal string constant. */
2314 fmt_str = c_getstr (fmt);
2315 if (fmt_str == NULL)
2316 return false;
2318 if (!init_target_chars ())
2319 return false;
2321 /* If the format doesn't contain % args or %%, use strcpy. */
2322 if (strchr (fmt_str, target_percent) == NULL)
2324 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2326 if (!fn)
2327 return false;
2329 /* Don't optimize sprintf (buf, "abc", ptr++). */
2330 if (orig)
2331 return false;
2333 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2334 'format' is known to contain no % formats. */
2335 gimple_seq stmts = NULL;
2336 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2337 gimple_seq_add_stmt_without_update (&stmts, repl);
2338 if (gimple_call_lhs (stmt))
2340 repl = gimple_build_assign (gimple_call_lhs (stmt),
2341 build_int_cst (integer_type_node,
2342 strlen (fmt_str)));
2343 gimple_seq_add_stmt_without_update (&stmts, repl);
2344 gsi_replace_with_seq_vops (gsi, stmts);
2345 /* gsi now points at the assignment to the lhs, get a
2346 stmt iterator to the memcpy call.
2347 ??? We can't use gsi_for_stmt as that doesn't work when the
2348 CFG isn't built yet. */
2349 gimple_stmt_iterator gsi2 = *gsi;
2350 gsi_prev (&gsi2);
2351 fold_stmt (&gsi2);
2353 else
2355 gsi_replace_with_seq_vops (gsi, stmts);
2356 fold_stmt (gsi);
2358 return true;
2361 /* If the format is "%s", use strcpy if the result isn't used. */
2362 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2364 tree fn;
2365 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2367 if (!fn)
2368 return false;
2370 /* Don't crash on sprintf (str1, "%s"). */
2371 if (!orig)
2372 return false;
2374 tree orig_len = NULL_TREE;
2375 if (gimple_call_lhs (stmt))
2377 orig_len = get_maxval_strlen (orig, 0);
2378 if (!orig_len)
2379 return false;
2382 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2383 gimple_seq stmts = NULL;
2384 gimple repl = gimple_build_call (fn, 2, dest, orig);
2385 gimple_seq_add_stmt_without_update (&stmts, repl);
2386 if (gimple_call_lhs (stmt))
2388 if (!useless_type_conversion_p (integer_type_node,
2389 TREE_TYPE (orig_len)))
2390 orig_len = fold_convert (integer_type_node, orig_len);
2391 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2392 gimple_seq_add_stmt_without_update (&stmts, repl);
2393 gsi_replace_with_seq_vops (gsi, stmts);
2394 /* gsi now points at the assignment to the lhs, get a
2395 stmt iterator to the memcpy call.
2396 ??? We can't use gsi_for_stmt as that doesn't work when the
2397 CFG isn't built yet. */
2398 gimple_stmt_iterator gsi2 = *gsi;
2399 gsi_prev (&gsi2);
2400 fold_stmt (&gsi2);
2402 else
2404 gsi_replace_with_seq_vops (gsi, stmts);
2405 fold_stmt (gsi);
2407 return true;
2409 return false;
2412 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2413 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2414 attempt to simplify calls with more than 4 arguments.
2416 Return NULL_TREE if no simplification was possible, otherwise return the
2417 simplified form of the call as a tree. If IGNORED is true, it means that
2418 the caller does not use the returned value of the function. */
2420 static bool
2421 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2423 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2424 tree dest = gimple_call_arg (stmt, 0);
2425 tree destsize = gimple_call_arg (stmt, 1);
2426 tree fmt = gimple_call_arg (stmt, 2);
2427 tree orig = NULL_TREE;
2428 const char *fmt_str = NULL;
2430 if (gimple_call_num_args (stmt) > 4)
2431 return false;
2433 if (gimple_call_num_args (stmt) == 4)
2434 orig = gimple_call_arg (stmt, 3);
2436 if (!tree_fits_uhwi_p (destsize))
2437 return false;
2438 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2440 /* Check whether the format is a literal string constant. */
2441 fmt_str = c_getstr (fmt);
2442 if (fmt_str == NULL)
2443 return false;
2445 if (!init_target_chars ())
2446 return false;
2448 /* If the format doesn't contain % args or %%, use strcpy. */
2449 if (strchr (fmt_str, target_percent) == NULL)
2451 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2452 if (!fn)
2453 return false;
2455 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2456 if (orig)
2457 return false;
2459 /* We could expand this as
2460 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2461 or to
2462 memcpy (str, fmt_with_nul_at_cstm1, cst);
2463 but in the former case that might increase code size
2464 and in the latter case grow .rodata section too much.
2465 So punt for now. */
2466 size_t len = strlen (fmt_str);
2467 if (len >= destlen)
2468 return false;
2470 gimple_seq stmts = NULL;
2471 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2472 gimple_seq_add_stmt_without_update (&stmts, repl);
2473 if (gimple_call_lhs (stmt))
2475 repl = gimple_build_assign (gimple_call_lhs (stmt),
2476 build_int_cst (integer_type_node, len));
2477 gimple_seq_add_stmt_without_update (&stmts, repl);
2478 gsi_replace_with_seq_vops (gsi, stmts);
2479 /* gsi now points at the assignment to the lhs, get a
2480 stmt iterator to the memcpy call.
2481 ??? We can't use gsi_for_stmt as that doesn't work when the
2482 CFG isn't built yet. */
2483 gimple_stmt_iterator gsi2 = *gsi;
2484 gsi_prev (&gsi2);
2485 fold_stmt (&gsi2);
2487 else
2489 gsi_replace_with_seq_vops (gsi, stmts);
2490 fold_stmt (gsi);
2492 return true;
2495 /* If the format is "%s", use strcpy if the result isn't used. */
2496 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2498 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2499 if (!fn)
2500 return false;
2502 /* Don't crash on snprintf (str1, cst, "%s"). */
2503 if (!orig)
2504 return false;
2506 tree orig_len = get_maxval_strlen (orig, 0);
2507 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
2508 return false;
2510 /* We could expand this as
2511 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2512 or to
2513 memcpy (str1, str2_with_nul_at_cstm1, cst);
2514 but in the former case that might increase code size
2515 and in the latter case grow .rodata section too much.
2516 So punt for now. */
2517 if (compare_tree_int (orig_len, destlen) >= 0)
2518 return false;
2520 /* Convert snprintf (str1, cst, "%s", str2) into
2521 strcpy (str1, str2) if strlen (str2) < cst. */
2522 gimple_seq stmts = NULL;
2523 gimple repl = gimple_build_call (fn, 2, dest, orig);
2524 gimple_seq_add_stmt_without_update (&stmts, repl);
2525 if (gimple_call_lhs (stmt))
2527 if (!useless_type_conversion_p (integer_type_node,
2528 TREE_TYPE (orig_len)))
2529 orig_len = fold_convert (integer_type_node, orig_len);
2530 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2531 gimple_seq_add_stmt_without_update (&stmts, repl);
2532 gsi_replace_with_seq_vops (gsi, stmts);
2533 /* gsi now points at the assignment to the lhs, get a
2534 stmt iterator to the memcpy call.
2535 ??? We can't use gsi_for_stmt as that doesn't work when the
2536 CFG isn't built yet. */
2537 gimple_stmt_iterator gsi2 = *gsi;
2538 gsi_prev (&gsi2);
2539 fold_stmt (&gsi2);
2541 else
2543 gsi_replace_with_seq_vops (gsi, stmts);
2544 fold_stmt (gsi);
2546 return true;
2548 return false;
2551 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2552 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2553 more than 3 arguments, and ARG may be null in the 2-argument case.
2555 Return NULL_TREE if no simplification was possible, otherwise return the
2556 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2557 code of the function to be simplified. */
2559 static bool
2560 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2561 tree fp, tree fmt, tree arg,
2562 enum built_in_function fcode)
2564 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2565 tree fn_fputc, fn_fputs;
2566 const char *fmt_str = NULL;
2568 /* If the return value is used, don't do the transformation. */
2569 if (gimple_call_lhs (stmt) != NULL_TREE)
2570 return false;
2572 /* Check whether the format is a literal string constant. */
2573 fmt_str = c_getstr (fmt);
2574 if (fmt_str == NULL)
2575 return false;
2577 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2579 /* If we're using an unlocked function, assume the other
2580 unlocked functions exist explicitly. */
2581 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2582 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2584 else
2586 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2587 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2590 if (!init_target_chars ())
2591 return false;
2593 /* If the format doesn't contain % args or %%, use strcpy. */
2594 if (strchr (fmt_str, target_percent) == NULL)
2596 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2597 && arg)
2598 return false;
2600 /* If the format specifier was "", fprintf does nothing. */
2601 if (fmt_str[0] == '\0')
2603 replace_call_with_value (gsi, NULL_TREE);
2604 return true;
2607 /* When "string" doesn't contain %, replace all cases of
2608 fprintf (fp, string) with fputs (string, fp). The fputs
2609 builtin will take care of special cases like length == 1. */
2610 if (fn_fputs)
2612 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2613 replace_call_with_call_and_fold (gsi, repl);
2614 return true;
2618 /* The other optimizations can be done only on the non-va_list variants. */
2619 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2620 return false;
2622 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2623 else if (strcmp (fmt_str, target_percent_s) == 0)
2625 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2626 return false;
2627 if (fn_fputs)
2629 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2630 replace_call_with_call_and_fold (gsi, repl);
2631 return true;
2635 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2636 else if (strcmp (fmt_str, target_percent_c) == 0)
2638 if (!arg
2639 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2640 return false;
2641 if (fn_fputc)
2643 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2644 replace_call_with_call_and_fold (gsi, repl);
2645 return true;
2649 return false;
2652 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2653 FMT and ARG are the arguments to the call; we don't fold cases with
2654 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2656 Return NULL_TREE if no simplification was possible, otherwise return the
2657 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2658 code of the function to be simplified. */
2660 static bool
2661 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2662 tree arg, enum built_in_function fcode)
2664 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2665 tree fn_putchar, fn_puts, newarg;
2666 const char *fmt_str = NULL;
2668 /* If the return value is used, don't do the transformation. */
2669 if (gimple_call_lhs (stmt) != NULL_TREE)
2670 return false;
2672 /* Check whether the format is a literal string constant. */
2673 fmt_str = c_getstr (fmt);
2674 if (fmt_str == NULL)
2675 return false;
2677 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2679 /* If we're using an unlocked function, assume the other
2680 unlocked functions exist explicitly. */
2681 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2682 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2684 else
2686 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2687 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2690 if (!init_target_chars ())
2691 return false;
2693 if (strcmp (fmt_str, target_percent_s) == 0
2694 || strchr (fmt_str, target_percent) == NULL)
2696 const char *str;
2698 if (strcmp (fmt_str, target_percent_s) == 0)
2700 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2701 return false;
2703 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2704 return false;
2706 str = c_getstr (arg);
2707 if (str == NULL)
2708 return false;
2710 else
2712 /* The format specifier doesn't contain any '%' characters. */
2713 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2714 && arg)
2715 return false;
2716 str = fmt_str;
2719 /* If the string was "", printf does nothing. */
2720 if (str[0] == '\0')
2722 replace_call_with_value (gsi, NULL_TREE);
2723 return true;
2726 /* If the string has length of 1, call putchar. */
2727 if (str[1] == '\0')
2729 /* Given printf("c"), (where c is any one character,)
2730 convert "c"[0] to an int and pass that to the replacement
2731 function. */
2732 newarg = build_int_cst (integer_type_node, str[0]);
2733 if (fn_putchar)
2735 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2736 replace_call_with_call_and_fold (gsi, repl);
2737 return true;
2740 else
2742 /* If the string was "string\n", call puts("string"). */
2743 size_t len = strlen (str);
2744 if ((unsigned char)str[len - 1] == target_newline
2745 && (size_t) (int) len == len
2746 && (int) len > 0)
2748 char *newstr;
2749 tree offset_node, string_cst;
2751 /* Create a NUL-terminated string that's one char shorter
2752 than the original, stripping off the trailing '\n'. */
2753 newarg = build_string_literal (len, str);
2754 string_cst = string_constant (newarg, &offset_node);
2755 gcc_checking_assert (string_cst
2756 && (TREE_STRING_LENGTH (string_cst)
2757 == (int) len)
2758 && integer_zerop (offset_node)
2759 && (unsigned char)
2760 TREE_STRING_POINTER (string_cst)[len - 1]
2761 == target_newline);
2762 /* build_string_literal creates a new STRING_CST,
2763 modify it in place to avoid double copying. */
2764 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2765 newstr[len - 1] = '\0';
2766 if (fn_puts)
2768 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2769 replace_call_with_call_and_fold (gsi, repl);
2770 return true;
2773 else
2774 /* We'd like to arrange to call fputs(string,stdout) here,
2775 but we need stdout and don't have a way to get it yet. */
2776 return false;
2780 /* The other optimizations can be done only on the non-va_list variants. */
2781 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2782 return false;
2784 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2785 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2787 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2788 return false;
2789 if (fn_puts)
2791 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2792 replace_call_with_call_and_fold (gsi, repl);
2793 return true;
2797 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2798 else if (strcmp (fmt_str, target_percent_c) == 0)
2800 if (!arg || ! useless_type_conversion_p (integer_type_node,
2801 TREE_TYPE (arg)))
2802 return false;
2803 if (fn_putchar)
2805 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2806 replace_call_with_call_and_fold (gsi, repl);
2807 return true;
2811 return false;
2816 /* Fold a call to __builtin_strlen with known length LEN. */
2818 static bool
2819 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2821 gimple stmt = gsi_stmt (*gsi);
2822 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2823 if (!len)
2824 return false;
2825 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2826 replace_call_with_value (gsi, len);
2827 return true;
2831 /* Fold the non-target builtin at *GSI and return whether any simplification
2832 was made. */
2834 static bool
2835 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2837 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2838 tree callee = gimple_call_fndecl (stmt);
2840 /* Give up for always_inline inline builtins until they are
2841 inlined. */
2842 if (avoid_folding_inline_builtin (callee))
2843 return false;
2845 unsigned n = gimple_call_num_args (stmt);
2846 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2847 switch (fcode)
2849 case BUILT_IN_BZERO:
2850 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2851 gimple_call_arg (stmt, 1));
2852 case BUILT_IN_MEMSET:
2853 return gimple_fold_builtin_memset (gsi,
2854 gimple_call_arg (stmt, 1),
2855 gimple_call_arg (stmt, 2));
2856 case BUILT_IN_BCOPY:
2857 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2858 gimple_call_arg (stmt, 0), 3);
2859 case BUILT_IN_MEMCPY:
2860 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2861 gimple_call_arg (stmt, 1), 0);
2862 case BUILT_IN_MEMPCPY:
2863 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2864 gimple_call_arg (stmt, 1), 1);
2865 case BUILT_IN_MEMMOVE:
2866 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2867 gimple_call_arg (stmt, 1), 3);
2868 case BUILT_IN_SPRINTF_CHK:
2869 case BUILT_IN_VSPRINTF_CHK:
2870 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2871 case BUILT_IN_STRCAT_CHK:
2872 return gimple_fold_builtin_strcat_chk (gsi);
2873 case BUILT_IN_STRNCAT_CHK:
2874 return gimple_fold_builtin_strncat_chk (gsi);
2875 case BUILT_IN_STRLEN:
2876 return gimple_fold_builtin_strlen (gsi);
2877 case BUILT_IN_STRCPY:
2878 return gimple_fold_builtin_strcpy (gsi,
2879 gimple_call_arg (stmt, 0),
2880 gimple_call_arg (stmt, 1));
2881 case BUILT_IN_STRNCPY:
2882 return gimple_fold_builtin_strncpy (gsi,
2883 gimple_call_arg (stmt, 0),
2884 gimple_call_arg (stmt, 1),
2885 gimple_call_arg (stmt, 2));
2886 case BUILT_IN_STRCAT:
2887 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2888 gimple_call_arg (stmt, 1));
2889 case BUILT_IN_STRNCAT:
2890 return gimple_fold_builtin_strncat (gsi);
2891 case BUILT_IN_FPUTS:
2892 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2893 gimple_call_arg (stmt, 1), false);
2894 case BUILT_IN_FPUTS_UNLOCKED:
2895 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2896 gimple_call_arg (stmt, 1), true);
2897 case BUILT_IN_MEMCPY_CHK:
2898 case BUILT_IN_MEMPCPY_CHK:
2899 case BUILT_IN_MEMMOVE_CHK:
2900 case BUILT_IN_MEMSET_CHK:
2901 return gimple_fold_builtin_memory_chk (gsi,
2902 gimple_call_arg (stmt, 0),
2903 gimple_call_arg (stmt, 1),
2904 gimple_call_arg (stmt, 2),
2905 gimple_call_arg (stmt, 3),
2906 fcode);
2907 case BUILT_IN_STPCPY:
2908 return gimple_fold_builtin_stpcpy (gsi);
2909 case BUILT_IN_STRCPY_CHK:
2910 case BUILT_IN_STPCPY_CHK:
2911 return gimple_fold_builtin_stxcpy_chk (gsi,
2912 gimple_call_arg (stmt, 0),
2913 gimple_call_arg (stmt, 1),
2914 gimple_call_arg (stmt, 2),
2915 fcode);
2916 case BUILT_IN_STRNCPY_CHK:
2917 case BUILT_IN_STPNCPY_CHK:
2918 return gimple_fold_builtin_stxncpy_chk (gsi,
2919 gimple_call_arg (stmt, 0),
2920 gimple_call_arg (stmt, 1),
2921 gimple_call_arg (stmt, 2),
2922 gimple_call_arg (stmt, 3),
2923 fcode);
2924 case BUILT_IN_SNPRINTF_CHK:
2925 case BUILT_IN_VSNPRINTF_CHK:
2926 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
2927 case BUILT_IN_SNPRINTF:
2928 return gimple_fold_builtin_snprintf (gsi);
2929 case BUILT_IN_SPRINTF:
2930 return gimple_fold_builtin_sprintf (gsi);
2931 case BUILT_IN_FPRINTF:
2932 case BUILT_IN_FPRINTF_UNLOCKED:
2933 case BUILT_IN_VFPRINTF:
2934 if (n == 2 || n == 3)
2935 return gimple_fold_builtin_fprintf (gsi,
2936 gimple_call_arg (stmt, 0),
2937 gimple_call_arg (stmt, 1),
2938 n == 3
2939 ? gimple_call_arg (stmt, 2)
2940 : NULL_TREE,
2941 fcode);
2942 break;
2943 case BUILT_IN_FPRINTF_CHK:
2944 case BUILT_IN_VFPRINTF_CHK:
2945 if (n == 3 || n == 4)
2946 return gimple_fold_builtin_fprintf (gsi,
2947 gimple_call_arg (stmt, 0),
2948 gimple_call_arg (stmt, 2),
2949 n == 4
2950 ? gimple_call_arg (stmt, 3)
2951 : NULL_TREE,
2952 fcode);
2953 break;
2954 case BUILT_IN_PRINTF:
2955 case BUILT_IN_PRINTF_UNLOCKED:
2956 case BUILT_IN_VPRINTF:
2957 if (n == 1 || n == 2)
2958 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2959 n == 2
2960 ? gimple_call_arg (stmt, 1)
2961 : NULL_TREE, fcode);
2962 break;
2963 case BUILT_IN_PRINTF_CHK:
2964 case BUILT_IN_VPRINTF_CHK:
2965 if (n == 2 || n == 3)
2966 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2967 n == 3
2968 ? gimple_call_arg (stmt, 2)
2969 : NULL_TREE, fcode);
2970 default:;
2973 /* Try the generic builtin folder. */
2974 bool ignore = (gimple_call_lhs (stmt) == NULL);
2975 tree result = fold_call_stmt (stmt, ignore);
2976 if (result)
2978 if (ignore)
2979 STRIP_NOPS (result);
2980 else
2981 result = fold_convert (gimple_call_return_type (stmt), result);
2982 if (!update_call_from_tree (gsi, result))
2983 gimplify_and_update_call_from_tree (gsi, result);
2984 return true;
2987 return false;
2990 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
2991 doesn't fit into TYPE. The test for overflow should be regardless of
2992 -fwrapv, and even for unsigned types. */
2994 bool
2995 arith_overflowed_p (enum tree_code code, const_tree type,
2996 const_tree arg0, const_tree arg1)
2998 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
2999 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3000 widest2_int_cst;
3001 widest2_int warg0 = widest2_int_cst (arg0);
3002 widest2_int warg1 = widest2_int_cst (arg1);
3003 widest2_int wres;
3004 switch (code)
3006 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3007 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3008 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3009 default: gcc_unreachable ();
3011 signop sign = TYPE_SIGN (type);
3012 if (sign == UNSIGNED && wi::neg_p (wres))
3013 return true;
3014 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3017 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3018 The statement may be replaced by another statement, e.g., if the call
3019 simplifies to a constant value. Return true if any changes were made.
3020 It is assumed that the operands have been previously folded. */
3022 static bool
3023 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
3025 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3026 tree callee;
3027 bool changed = false;
3028 unsigned i;
3030 /* Fold *& in call arguments. */
3031 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3032 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3034 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3035 if (tmp)
3037 gimple_call_set_arg (stmt, i, tmp);
3038 changed = true;
3042 /* Check for virtual calls that became direct calls. */
3043 callee = gimple_call_fn (stmt);
3044 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3046 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3048 if (dump_file && virtual_method_call_p (callee)
3049 && !possible_polymorphic_call_target_p
3050 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3051 (OBJ_TYPE_REF_EXPR (callee)))))
3053 fprintf (dump_file,
3054 "Type inheritance inconsistent devirtualization of ");
3055 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3056 fprintf (dump_file, " to ");
3057 print_generic_expr (dump_file, callee, TDF_SLIM);
3058 fprintf (dump_file, "\n");
3061 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3062 changed = true;
3064 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3066 bool final;
3067 vec <cgraph_node *>targets
3068 = possible_polymorphic_call_targets (callee, stmt, &final);
3069 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3071 tree lhs = gimple_call_lhs (stmt);
3072 if (dump_enabled_p ())
3074 location_t loc = gimple_location_safe (stmt);
3075 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3076 "folding virtual function call to %s\n",
3077 targets.length () == 1
3078 ? targets[0]->name ()
3079 : "__builtin_unreachable");
3081 if (targets.length () == 1)
3083 gimple_call_set_fndecl (stmt, targets[0]->decl);
3084 changed = true;
3085 /* If the call becomes noreturn, remove the lhs. */
3086 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
3088 if (TREE_CODE (lhs) == SSA_NAME)
3090 tree var = create_tmp_var (TREE_TYPE (lhs));
3091 tree def = get_or_create_ssa_default_def (cfun, var);
3092 gimple new_stmt = gimple_build_assign (lhs, def);
3093 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3095 gimple_call_set_lhs (stmt, NULL_TREE);
3097 maybe_remove_unused_call_args (cfun, stmt);
3099 else
3101 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3102 gimple new_stmt = gimple_build_call (fndecl, 0);
3103 gimple_set_location (new_stmt, gimple_location (stmt));
3104 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3106 tree var = create_tmp_var (TREE_TYPE (lhs));
3107 tree def = get_or_create_ssa_default_def (cfun, var);
3109 /* To satisfy condition for
3110 cgraph_update_edges_for_call_stmt_node,
3111 we need to preserve GIMPLE_CALL statement
3112 at position of GSI iterator. */
3113 update_call_from_tree (gsi, def);
3114 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3116 else
3118 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3119 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3120 gsi_replace (gsi, new_stmt, false);
3122 return true;
3128 /* Check for indirect calls that became direct calls, and then
3129 no longer require a static chain. */
3130 if (gimple_call_chain (stmt))
3132 tree fn = gimple_call_fndecl (stmt);
3133 if (fn && !DECL_STATIC_CHAIN (fn))
3135 gimple_call_set_chain (stmt, NULL);
3136 changed = true;
3138 else
3140 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3141 if (tmp)
3143 gimple_call_set_chain (stmt, tmp);
3144 changed = true;
3149 if (inplace)
3150 return changed;
3152 /* Check for builtins that CCP can handle using information not
3153 available in the generic fold routines. */
3154 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3156 if (gimple_fold_builtin (gsi))
3157 changed = true;
3159 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3161 changed |= targetm.gimple_fold_builtin (gsi);
3163 else if (gimple_call_internal_p (stmt))
3165 enum tree_code subcode = ERROR_MARK;
3166 tree result = NULL_TREE;
3167 bool cplx_result = false;
3168 tree overflow = NULL_TREE;
3169 switch (gimple_call_internal_fn (stmt))
3171 case IFN_BUILTIN_EXPECT:
3172 result = fold_builtin_expect (gimple_location (stmt),
3173 gimple_call_arg (stmt, 0),
3174 gimple_call_arg (stmt, 1),
3175 gimple_call_arg (stmt, 2));
3176 break;
3177 case IFN_UBSAN_OBJECT_SIZE:
3178 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3179 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3180 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3181 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3182 gimple_call_arg (stmt, 2))))
3184 gsi_replace (gsi, gimple_build_nop (), true);
3185 unlink_stmt_vdef (stmt);
3186 release_defs (stmt);
3187 return true;
3189 break;
3190 case IFN_UBSAN_CHECK_ADD:
3191 subcode = PLUS_EXPR;
3192 break;
3193 case IFN_UBSAN_CHECK_SUB:
3194 subcode = MINUS_EXPR;
3195 break;
3196 case IFN_UBSAN_CHECK_MUL:
3197 subcode = MULT_EXPR;
3198 break;
3199 case IFN_ADD_OVERFLOW:
3200 subcode = PLUS_EXPR;
3201 cplx_result = true;
3202 break;
3203 case IFN_SUB_OVERFLOW:
3204 subcode = MINUS_EXPR;
3205 cplx_result = true;
3206 break;
3207 case IFN_MUL_OVERFLOW:
3208 subcode = MULT_EXPR;
3209 cplx_result = true;
3210 break;
3211 default:
3212 break;
3214 if (subcode != ERROR_MARK)
3216 tree arg0 = gimple_call_arg (stmt, 0);
3217 tree arg1 = gimple_call_arg (stmt, 1);
3218 tree type = TREE_TYPE (arg0);
3219 if (cplx_result)
3221 tree lhs = gimple_call_lhs (stmt);
3222 if (lhs == NULL_TREE)
3223 type = NULL_TREE;
3224 else
3225 type = TREE_TYPE (TREE_TYPE (lhs));
3227 if (type == NULL_TREE)
3229 /* x = y + 0; x = y - 0; x = y * 0; */
3230 else if (integer_zerop (arg1))
3231 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3232 /* x = 0 + y; x = 0 * y; */
3233 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3234 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3235 /* x = y - y; */
3236 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3237 result = integer_zero_node;
3238 /* x = y * 1; x = 1 * y; */
3239 else if (subcode == MULT_EXPR && integer_onep (arg1))
3240 result = arg0;
3241 else if (subcode == MULT_EXPR && integer_onep (arg0))
3242 result = arg1;
3243 else if (TREE_CODE (arg0) == INTEGER_CST
3244 && TREE_CODE (arg1) == INTEGER_CST)
3246 if (cplx_result)
3247 result = int_const_binop (subcode, fold_convert (type, arg0),
3248 fold_convert (type, arg1));
3249 else
3250 result = int_const_binop (subcode, arg0, arg1);
3251 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3253 if (cplx_result)
3254 overflow = build_one_cst (type);
3255 else
3256 result = NULL_TREE;
3259 if (result)
3261 if (result == integer_zero_node)
3262 result = build_zero_cst (type);
3263 else if (cplx_result && TREE_TYPE (result) != type)
3265 if (TREE_CODE (result) == INTEGER_CST)
3267 if (arith_overflowed_p (PLUS_EXPR, type, result,
3268 integer_zero_node))
3269 overflow = build_one_cst (type);
3271 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3272 && TYPE_UNSIGNED (type))
3273 || (TYPE_PRECISION (type)
3274 < (TYPE_PRECISION (TREE_TYPE (result))
3275 + (TYPE_UNSIGNED (TREE_TYPE (result))
3276 && !TYPE_UNSIGNED (type)))))
3277 result = NULL_TREE;
3278 if (result)
3279 result = fold_convert (type, result);
3284 if (result)
3286 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3287 result = drop_tree_overflow (result);
3288 if (cplx_result)
3290 if (overflow == NULL_TREE)
3291 overflow = build_zero_cst (TREE_TYPE (result));
3292 tree ctype = build_complex_type (TREE_TYPE (result));
3293 if (TREE_CODE (result) == INTEGER_CST
3294 && TREE_CODE (overflow) == INTEGER_CST)
3295 result = build_complex (ctype, result, overflow);
3296 else
3297 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3298 ctype, result, overflow);
3300 if (!update_call_from_tree (gsi, result))
3301 gimplify_and_update_call_from_tree (gsi, result);
3302 changed = true;
3306 return changed;
3310 /* Worker for fold_stmt_1 dispatch to pattern based folding with
3311 gimple_simplify.
3313 Replaces *GSI with the simplification result in RCODE and OPS
3314 and the associated statements in *SEQ. Does the replacement
3315 according to INPLACE and returns true if the operation succeeded. */
3317 static bool
3318 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3319 code_helper rcode, tree *ops,
3320 gimple_seq *seq, bool inplace)
3322 gimple stmt = gsi_stmt (*gsi);
3324 /* Play safe and do not allow abnormals to be mentioned in
3325 newly created statements. See also maybe_push_res_to_seq. */
3326 if ((TREE_CODE (ops[0]) == SSA_NAME
3327 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
3328 || (ops[1]
3329 && TREE_CODE (ops[1]) == SSA_NAME
3330 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
3331 || (ops[2]
3332 && TREE_CODE (ops[2]) == SSA_NAME
3333 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
3334 return false;
3336 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
3338 gcc_assert (rcode.is_tree_code ());
3339 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3340 /* GIMPLE_CONDs condition may not throw. */
3341 && (!flag_exceptions
3342 || !cfun->can_throw_non_call_exceptions
3343 || !operation_could_trap_p (rcode,
3344 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3345 false, NULL_TREE)))
3346 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
3347 else if (rcode == SSA_NAME)
3348 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
3349 build_zero_cst (TREE_TYPE (ops[0])));
3350 else if (rcode == INTEGER_CST)
3352 if (integer_zerop (ops[0]))
3353 gimple_cond_make_false (cond_stmt);
3354 else
3355 gimple_cond_make_true (cond_stmt);
3357 else if (!inplace)
3359 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3360 ops, seq);
3361 if (!res)
3362 return false;
3363 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
3364 build_zero_cst (TREE_TYPE (res)));
3366 else
3367 return false;
3368 if (dump_file && (dump_flags & TDF_DETAILS))
3370 fprintf (dump_file, "gimple_simplified to ");
3371 if (!gimple_seq_empty_p (*seq))
3372 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3373 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3374 0, TDF_SLIM);
3376 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3377 return true;
3379 else if (is_gimple_assign (stmt)
3380 && rcode.is_tree_code ())
3382 if (!inplace
3383 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
3385 maybe_build_generic_op (rcode,
3386 TREE_TYPE (gimple_assign_lhs (stmt)),
3387 &ops[0], ops[1], ops[2]);
3388 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
3389 if (dump_file && (dump_flags & TDF_DETAILS))
3391 fprintf (dump_file, "gimple_simplified to ");
3392 if (!gimple_seq_empty_p (*seq))
3393 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3394 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3395 0, TDF_SLIM);
3397 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3398 return true;
3401 else if (!inplace)
3403 if (gimple_has_lhs (stmt))
3405 tree lhs = gimple_get_lhs (stmt);
3406 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3407 ops, seq, lhs))
3408 return false;
3409 if (dump_file && (dump_flags & TDF_DETAILS))
3411 fprintf (dump_file, "gimple_simplified to ");
3412 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3414 gsi_replace_with_seq_vops (gsi, *seq);
3415 return true;
3417 else
3418 gcc_unreachable ();
3421 return false;
3424 /* Canonicalize MEM_REFs invariant address operand after propagation. */
3426 static bool
3427 maybe_canonicalize_mem_ref_addr (tree *t)
3429 bool res = false;
3431 if (TREE_CODE (*t) == ADDR_EXPR)
3432 t = &TREE_OPERAND (*t, 0);
3434 while (handled_component_p (*t))
3435 t = &TREE_OPERAND (*t, 0);
3437 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
3438 of invariant addresses into a SSA name MEM_REF address. */
3439 if (TREE_CODE (*t) == MEM_REF
3440 || TREE_CODE (*t) == TARGET_MEM_REF)
3442 tree addr = TREE_OPERAND (*t, 0);
3443 if (TREE_CODE (addr) == ADDR_EXPR
3444 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3445 || handled_component_p (TREE_OPERAND (addr, 0))))
3447 tree base;
3448 HOST_WIDE_INT coffset;
3449 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3450 &coffset);
3451 if (!base)
3452 gcc_unreachable ();
3454 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3455 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3456 TREE_OPERAND (*t, 1),
3457 size_int (coffset));
3458 res = true;
3460 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3461 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3464 /* Canonicalize back MEM_REFs to plain reference trees if the object
3465 accessed is a decl that has the same access semantics as the MEM_REF. */
3466 if (TREE_CODE (*t) == MEM_REF
3467 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
3468 && integer_zerop (TREE_OPERAND (*t, 1))
3469 && MR_DEPENDENCE_CLIQUE (*t) == 0)
3471 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3472 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3473 if (/* Same volatile qualification. */
3474 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3475 /* Same TBAA behavior with -fstrict-aliasing. */
3476 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3477 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3478 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3479 /* Same alignment. */
3480 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3481 /* We have to look out here to not drop a required conversion
3482 from the rhs to the lhs if *t appears on the lhs or vice-versa
3483 if it appears on the rhs. Thus require strict type
3484 compatibility. */
3485 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3487 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3488 res = true;
3492 /* Canonicalize TARGET_MEM_REF in particular with respect to
3493 the indexes becoming constant. */
3494 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3496 tree tem = maybe_fold_tmr (*t);
3497 if (tem)
3499 *t = tem;
3500 res = true;
3504 return res;
3507 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3508 distinguishes both cases. */
3510 static bool
3511 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3513 bool changed = false;
3514 gimple stmt = gsi_stmt (*gsi);
3515 unsigned i;
3517 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3518 after propagation.
3519 ??? This shouldn't be done in generic folding but in the
3520 propagation helpers which also know whether an address was
3521 propagated. */
3522 switch (gimple_code (stmt))
3524 case GIMPLE_ASSIGN:
3525 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3527 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3528 if ((REFERENCE_CLASS_P (*rhs)
3529 || TREE_CODE (*rhs) == ADDR_EXPR)
3530 && maybe_canonicalize_mem_ref_addr (rhs))
3531 changed = true;
3532 tree *lhs = gimple_assign_lhs_ptr (stmt);
3533 if (REFERENCE_CLASS_P (*lhs)
3534 && maybe_canonicalize_mem_ref_addr (lhs))
3535 changed = true;
3537 break;
3538 case GIMPLE_CALL:
3540 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3542 tree *arg = gimple_call_arg_ptr (stmt, i);
3543 if (REFERENCE_CLASS_P (*arg)
3544 && maybe_canonicalize_mem_ref_addr (arg))
3545 changed = true;
3547 tree *lhs = gimple_call_lhs_ptr (stmt);
3548 if (*lhs
3549 && REFERENCE_CLASS_P (*lhs)
3550 && maybe_canonicalize_mem_ref_addr (lhs))
3551 changed = true;
3552 break;
3554 case GIMPLE_ASM:
3556 gasm *asm_stmt = as_a <gasm *> (stmt);
3557 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3559 tree link = gimple_asm_output_op (asm_stmt, i);
3560 tree op = TREE_VALUE (link);
3561 if (REFERENCE_CLASS_P (op)
3562 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3563 changed = true;
3565 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3567 tree link = gimple_asm_input_op (asm_stmt, i);
3568 tree op = TREE_VALUE (link);
3569 if ((REFERENCE_CLASS_P (op)
3570 || TREE_CODE (op) == ADDR_EXPR)
3571 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3572 changed = true;
3575 break;
3576 case GIMPLE_DEBUG:
3577 if (gimple_debug_bind_p (stmt))
3579 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3580 if (*val
3581 && (REFERENCE_CLASS_P (*val)
3582 || TREE_CODE (*val) == ADDR_EXPR)
3583 && maybe_canonicalize_mem_ref_addr (val))
3584 changed = true;
3586 break;
3587 default:;
3590 /* Dispatch to pattern-based folding. */
3591 if (!inplace
3592 || is_gimple_assign (stmt)
3593 || gimple_code (stmt) == GIMPLE_COND)
3595 gimple_seq seq = NULL;
3596 code_helper rcode;
3597 tree ops[3] = {};
3598 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
3599 valueize, valueize))
3601 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3602 changed = true;
3603 else
3604 gimple_seq_discard (seq);
3608 stmt = gsi_stmt (*gsi);
3610 /* Fold the main computation performed by the statement. */
3611 switch (gimple_code (stmt))
3613 case GIMPLE_ASSIGN:
3615 unsigned old_num_ops = gimple_num_ops (stmt);
3616 enum tree_code subcode = gimple_assign_rhs_code (stmt);
3617 tree lhs = gimple_assign_lhs (stmt);
3618 tree new_rhs;
3619 /* First canonicalize operand order. This avoids building new
3620 trees if this is the only thing fold would later do. */
3621 if ((commutative_tree_code (subcode)
3622 || commutative_ternary_tree_code (subcode))
3623 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
3624 gimple_assign_rhs2 (stmt), false))
3626 tree tem = gimple_assign_rhs1 (stmt);
3627 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
3628 gimple_assign_set_rhs2 (stmt, tem);
3629 changed = true;
3631 new_rhs = fold_gimple_assign (gsi);
3632 if (new_rhs
3633 && !useless_type_conversion_p (TREE_TYPE (lhs),
3634 TREE_TYPE (new_rhs)))
3635 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3636 if (new_rhs
3637 && (!inplace
3638 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3640 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3641 changed = true;
3643 break;
3646 case GIMPLE_COND:
3647 changed |= fold_gimple_cond (as_a <gcond *> (stmt));
3648 break;
3650 case GIMPLE_CALL:
3651 changed |= gimple_fold_call (gsi, inplace);
3652 break;
3654 case GIMPLE_ASM:
3655 /* Fold *& in asm operands. */
3657 gasm *asm_stmt = as_a <gasm *> (stmt);
3658 size_t noutputs;
3659 const char **oconstraints;
3660 const char *constraint;
3661 bool allows_mem, allows_reg;
3663 noutputs = gimple_asm_noutputs (asm_stmt);
3664 oconstraints = XALLOCAVEC (const char *, noutputs);
3666 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3668 tree link = gimple_asm_output_op (asm_stmt, i);
3669 tree op = TREE_VALUE (link);
3670 oconstraints[i]
3671 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3672 if (REFERENCE_CLASS_P (op)
3673 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3675 TREE_VALUE (link) = op;
3676 changed = true;
3679 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3681 tree link = gimple_asm_input_op (asm_stmt, i);
3682 tree op = TREE_VALUE (link);
3683 constraint
3684 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3685 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3686 oconstraints, &allows_mem, &allows_reg);
3687 if (REFERENCE_CLASS_P (op)
3688 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3689 != NULL_TREE)
3691 TREE_VALUE (link) = op;
3692 changed = true;
3696 break;
3698 case GIMPLE_DEBUG:
3699 if (gimple_debug_bind_p (stmt))
3701 tree val = gimple_debug_bind_get_value (stmt);
3702 if (val
3703 && REFERENCE_CLASS_P (val))
3705 tree tem = maybe_fold_reference (val, false);
3706 if (tem)
3708 gimple_debug_bind_set_value (stmt, tem);
3709 changed = true;
3712 else if (val
3713 && TREE_CODE (val) == ADDR_EXPR)
3715 tree ref = TREE_OPERAND (val, 0);
3716 tree tem = maybe_fold_reference (ref, false);
3717 if (tem)
3719 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3720 gimple_debug_bind_set_value (stmt, tem);
3721 changed = true;
3725 break;
3727 default:;
3730 stmt = gsi_stmt (*gsi);
3732 /* Fold *& on the lhs. */
3733 if (gimple_has_lhs (stmt))
3735 tree lhs = gimple_get_lhs (stmt);
3736 if (lhs && REFERENCE_CLASS_P (lhs))
3738 tree new_lhs = maybe_fold_reference (lhs, true);
3739 if (new_lhs)
3741 gimple_set_lhs (stmt, new_lhs);
3742 changed = true;
3747 return changed;
3750 /* Valueziation callback that ends up not following SSA edges. */
3752 tree
3753 no_follow_ssa_edges (tree)
3755 return NULL_TREE;
3758 /* Valueization callback that ends up following single-use SSA edges only. */
3760 tree
3761 follow_single_use_edges (tree val)
3763 if (TREE_CODE (val) == SSA_NAME
3764 && !has_single_use (val))
3765 return NULL_TREE;
3766 return val;
3769 /* Fold the statement pointed to by GSI. In some cases, this function may
3770 replace the whole statement with a new one. Returns true iff folding
3771 makes any changes.
3772 The statement pointed to by GSI should be in valid gimple form but may
3773 be in unfolded state as resulting from for example constant propagation
3774 which can produce *&x = 0. */
3776 bool
3777 fold_stmt (gimple_stmt_iterator *gsi)
3779 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
3782 bool
3783 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
3785 return fold_stmt_1 (gsi, false, valueize);
3788 /* Perform the minimal folding on statement *GSI. Only operations like
3789 *&x created by constant propagation are handled. The statement cannot
3790 be replaced with a new one. Return true if the statement was
3791 changed, false otherwise.
3792 The statement *GSI should be in valid gimple form but may
3793 be in unfolded state as resulting from for example constant propagation
3794 which can produce *&x = 0. */
3796 bool
3797 fold_stmt_inplace (gimple_stmt_iterator *gsi)
3799 gimple stmt = gsi_stmt (*gsi);
3800 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
3801 gcc_assert (gsi_stmt (*gsi) == stmt);
3802 return changed;
3805 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3806 if EXPR is null or we don't know how.
3807 If non-null, the result always has boolean type. */
3809 static tree
3810 canonicalize_bool (tree expr, bool invert)
3812 if (!expr)
3813 return NULL_TREE;
3814 else if (invert)
3816 if (integer_nonzerop (expr))
3817 return boolean_false_node;
3818 else if (integer_zerop (expr))
3819 return boolean_true_node;
3820 else if (TREE_CODE (expr) == SSA_NAME)
3821 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3822 build_int_cst (TREE_TYPE (expr), 0));
3823 else if (COMPARISON_CLASS_P (expr))
3824 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3825 boolean_type_node,
3826 TREE_OPERAND (expr, 0),
3827 TREE_OPERAND (expr, 1));
3828 else
3829 return NULL_TREE;
3831 else
3833 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3834 return expr;
3835 if (integer_nonzerop (expr))
3836 return boolean_true_node;
3837 else if (integer_zerop (expr))
3838 return boolean_false_node;
3839 else if (TREE_CODE (expr) == SSA_NAME)
3840 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3841 build_int_cst (TREE_TYPE (expr), 0));
3842 else if (COMPARISON_CLASS_P (expr))
3843 return fold_build2 (TREE_CODE (expr),
3844 boolean_type_node,
3845 TREE_OPERAND (expr, 0),
3846 TREE_OPERAND (expr, 1));
3847 else
3848 return NULL_TREE;
3852 /* Check to see if a boolean expression EXPR is logically equivalent to the
3853 comparison (OP1 CODE OP2). Check for various identities involving
3854 SSA_NAMEs. */
3856 static bool
3857 same_bool_comparison_p (const_tree expr, enum tree_code code,
3858 const_tree op1, const_tree op2)
3860 gimple s;
3862 /* The obvious case. */
3863 if (TREE_CODE (expr) == code
3864 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
3865 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
3866 return true;
3868 /* Check for comparing (name, name != 0) and the case where expr
3869 is an SSA_NAME with a definition matching the comparison. */
3870 if (TREE_CODE (expr) == SSA_NAME
3871 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3873 if (operand_equal_p (expr, op1, 0))
3874 return ((code == NE_EXPR && integer_zerop (op2))
3875 || (code == EQ_EXPR && integer_nonzerop (op2)));
3876 s = SSA_NAME_DEF_STMT (expr);
3877 if (is_gimple_assign (s)
3878 && gimple_assign_rhs_code (s) == code
3879 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
3880 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
3881 return true;
3884 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3885 of name is a comparison, recurse. */
3886 if (TREE_CODE (op1) == SSA_NAME
3887 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
3889 s = SSA_NAME_DEF_STMT (op1);
3890 if (is_gimple_assign (s)
3891 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
3893 enum tree_code c = gimple_assign_rhs_code (s);
3894 if ((c == NE_EXPR && integer_zerop (op2))
3895 || (c == EQ_EXPR && integer_nonzerop (op2)))
3896 return same_bool_comparison_p (expr, c,
3897 gimple_assign_rhs1 (s),
3898 gimple_assign_rhs2 (s));
3899 if ((c == EQ_EXPR && integer_zerop (op2))
3900 || (c == NE_EXPR && integer_nonzerop (op2)))
3901 return same_bool_comparison_p (expr,
3902 invert_tree_comparison (c, false),
3903 gimple_assign_rhs1 (s),
3904 gimple_assign_rhs2 (s));
3907 return false;
3910 /* Check to see if two boolean expressions OP1 and OP2 are logically
3911 equivalent. */
3913 static bool
3914 same_bool_result_p (const_tree op1, const_tree op2)
3916 /* Simple cases first. */
3917 if (operand_equal_p (op1, op2, 0))
3918 return true;
3920 /* Check the cases where at least one of the operands is a comparison.
3921 These are a bit smarter than operand_equal_p in that they apply some
3922 identifies on SSA_NAMEs. */
3923 if (COMPARISON_CLASS_P (op2)
3924 && same_bool_comparison_p (op1, TREE_CODE (op2),
3925 TREE_OPERAND (op2, 0),
3926 TREE_OPERAND (op2, 1)))
3927 return true;
3928 if (COMPARISON_CLASS_P (op1)
3929 && same_bool_comparison_p (op2, TREE_CODE (op1),
3930 TREE_OPERAND (op1, 0),
3931 TREE_OPERAND (op1, 1)))
3932 return true;
3934 /* Default case. */
3935 return false;
3938 /* Forward declarations for some mutually recursive functions. */
3940 static tree
3941 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3942 enum tree_code code2, tree op2a, tree op2b);
3943 static tree
3944 and_var_with_comparison (tree var, bool invert,
3945 enum tree_code code2, tree op2a, tree op2b);
3946 static tree
3947 and_var_with_comparison_1 (gimple stmt,
3948 enum tree_code code2, tree op2a, tree op2b);
3949 static tree
3950 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3951 enum tree_code code2, tree op2a, tree op2b);
3952 static tree
3953 or_var_with_comparison (tree var, bool invert,
3954 enum tree_code code2, tree op2a, tree op2b);
3955 static tree
3956 or_var_with_comparison_1 (gimple stmt,
3957 enum tree_code code2, tree op2a, tree op2b);
3959 /* Helper function for and_comparisons_1: try to simplify the AND of the
3960 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3961 If INVERT is true, invert the value of the VAR before doing the AND.
3962 Return NULL_EXPR if we can't simplify this to a single expression. */
3964 static tree
3965 and_var_with_comparison (tree var, bool invert,
3966 enum tree_code code2, tree op2a, tree op2b)
3968 tree t;
3969 gimple stmt = SSA_NAME_DEF_STMT (var);
3971 /* We can only deal with variables whose definitions are assignments. */
3972 if (!is_gimple_assign (stmt))
3973 return NULL_TREE;
3975 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3976 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3977 Then we only have to consider the simpler non-inverted cases. */
3978 if (invert)
3979 t = or_var_with_comparison_1 (stmt,
3980 invert_tree_comparison (code2, false),
3981 op2a, op2b);
3982 else
3983 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
3984 return canonicalize_bool (t, invert);
3987 /* Try to simplify the AND of the ssa variable defined by the assignment
3988 STMT with the comparison specified by (OP2A CODE2 OP2B).
3989 Return NULL_EXPR if we can't simplify this to a single expression. */
3991 static tree
3992 and_var_with_comparison_1 (gimple stmt,
3993 enum tree_code code2, tree op2a, tree op2b)
3995 tree var = gimple_assign_lhs (stmt);
3996 tree true_test_var = NULL_TREE;
3997 tree false_test_var = NULL_TREE;
3998 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4000 /* Check for identities like (var AND (var == 0)) => false. */
4001 if (TREE_CODE (op2a) == SSA_NAME
4002 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4004 if ((code2 == NE_EXPR && integer_zerop (op2b))
4005 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4007 true_test_var = op2a;
4008 if (var == true_test_var)
4009 return var;
4011 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4012 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4014 false_test_var = op2a;
4015 if (var == false_test_var)
4016 return boolean_false_node;
4020 /* If the definition is a comparison, recurse on it. */
4021 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4023 tree t = and_comparisons_1 (innercode,
4024 gimple_assign_rhs1 (stmt),
4025 gimple_assign_rhs2 (stmt),
4026 code2,
4027 op2a,
4028 op2b);
4029 if (t)
4030 return t;
4033 /* If the definition is an AND or OR expression, we may be able to
4034 simplify by reassociating. */
4035 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4036 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4038 tree inner1 = gimple_assign_rhs1 (stmt);
4039 tree inner2 = gimple_assign_rhs2 (stmt);
4040 gimple s;
4041 tree t;
4042 tree partial = NULL_TREE;
4043 bool is_and = (innercode == BIT_AND_EXPR);
4045 /* Check for boolean identities that don't require recursive examination
4046 of inner1/inner2:
4047 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4048 inner1 AND (inner1 OR inner2) => inner1
4049 !inner1 AND (inner1 AND inner2) => false
4050 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4051 Likewise for similar cases involving inner2. */
4052 if (inner1 == true_test_var)
4053 return (is_and ? var : inner1);
4054 else if (inner2 == true_test_var)
4055 return (is_and ? var : inner2);
4056 else if (inner1 == false_test_var)
4057 return (is_and
4058 ? boolean_false_node
4059 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4060 else if (inner2 == false_test_var)
4061 return (is_and
4062 ? boolean_false_node
4063 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4065 /* Next, redistribute/reassociate the AND across the inner tests.
4066 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4067 if (TREE_CODE (inner1) == SSA_NAME
4068 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4069 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4070 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4071 gimple_assign_rhs1 (s),
4072 gimple_assign_rhs2 (s),
4073 code2, op2a, op2b)))
4075 /* Handle the AND case, where we are reassociating:
4076 (inner1 AND inner2) AND (op2a code2 op2b)
4077 => (t AND inner2)
4078 If the partial result t is a constant, we win. Otherwise
4079 continue on to try reassociating with the other inner test. */
4080 if (is_and)
4082 if (integer_onep (t))
4083 return inner2;
4084 else if (integer_zerop (t))
4085 return boolean_false_node;
4088 /* Handle the OR case, where we are redistributing:
4089 (inner1 OR inner2) AND (op2a code2 op2b)
4090 => (t OR (inner2 AND (op2a code2 op2b))) */
4091 else if (integer_onep (t))
4092 return boolean_true_node;
4094 /* Save partial result for later. */
4095 partial = t;
4098 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4099 if (TREE_CODE (inner2) == SSA_NAME
4100 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4101 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4102 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4103 gimple_assign_rhs1 (s),
4104 gimple_assign_rhs2 (s),
4105 code2, op2a, op2b)))
4107 /* Handle the AND case, where we are reassociating:
4108 (inner1 AND inner2) AND (op2a code2 op2b)
4109 => (inner1 AND t) */
4110 if (is_and)
4112 if (integer_onep (t))
4113 return inner1;
4114 else if (integer_zerop (t))
4115 return boolean_false_node;
4116 /* If both are the same, we can apply the identity
4117 (x AND x) == x. */
4118 else if (partial && same_bool_result_p (t, partial))
4119 return t;
4122 /* Handle the OR case. where we are redistributing:
4123 (inner1 OR inner2) AND (op2a code2 op2b)
4124 => (t OR (inner1 AND (op2a code2 op2b)))
4125 => (t OR partial) */
4126 else
4128 if (integer_onep (t))
4129 return boolean_true_node;
4130 else if (partial)
4132 /* We already got a simplification for the other
4133 operand to the redistributed OR expression. The
4134 interesting case is when at least one is false.
4135 Or, if both are the same, we can apply the identity
4136 (x OR x) == x. */
4137 if (integer_zerop (partial))
4138 return t;
4139 else if (integer_zerop (t))
4140 return partial;
4141 else if (same_bool_result_p (t, partial))
4142 return t;
4147 return NULL_TREE;
4150 /* Try to simplify the AND of two comparisons defined by
4151 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4152 If this can be done without constructing an intermediate value,
4153 return the resulting tree; otherwise NULL_TREE is returned.
4154 This function is deliberately asymmetric as it recurses on SSA_DEFs
4155 in the first comparison but not the second. */
4157 static tree
4158 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4159 enum tree_code code2, tree op2a, tree op2b)
4161 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4163 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4164 if (operand_equal_p (op1a, op2a, 0)
4165 && operand_equal_p (op1b, op2b, 0))
4167 /* Result will be either NULL_TREE, or a combined comparison. */
4168 tree t = combine_comparisons (UNKNOWN_LOCATION,
4169 TRUTH_ANDIF_EXPR, code1, code2,
4170 truth_type, op1a, op1b);
4171 if (t)
4172 return t;
4175 /* Likewise the swapped case of the above. */
4176 if (operand_equal_p (op1a, op2b, 0)
4177 && operand_equal_p (op1b, op2a, 0))
4179 /* Result will be either NULL_TREE, or a combined comparison. */
4180 tree t = combine_comparisons (UNKNOWN_LOCATION,
4181 TRUTH_ANDIF_EXPR, code1,
4182 swap_tree_comparison (code2),
4183 truth_type, op1a, op1b);
4184 if (t)
4185 return t;
4188 /* If both comparisons are of the same value against constants, we might
4189 be able to merge them. */
4190 if (operand_equal_p (op1a, op2a, 0)
4191 && TREE_CODE (op1b) == INTEGER_CST
4192 && TREE_CODE (op2b) == INTEGER_CST)
4194 int cmp = tree_int_cst_compare (op1b, op2b);
4196 /* If we have (op1a == op1b), we should either be able to
4197 return that or FALSE, depending on whether the constant op1b
4198 also satisfies the other comparison against op2b. */
4199 if (code1 == EQ_EXPR)
4201 bool done = true;
4202 bool val;
4203 switch (code2)
4205 case EQ_EXPR: val = (cmp == 0); break;
4206 case NE_EXPR: val = (cmp != 0); break;
4207 case LT_EXPR: val = (cmp < 0); break;
4208 case GT_EXPR: val = (cmp > 0); break;
4209 case LE_EXPR: val = (cmp <= 0); break;
4210 case GE_EXPR: val = (cmp >= 0); break;
4211 default: done = false;
4213 if (done)
4215 if (val)
4216 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4217 else
4218 return boolean_false_node;
4221 /* Likewise if the second comparison is an == comparison. */
4222 else if (code2 == EQ_EXPR)
4224 bool done = true;
4225 bool val;
4226 switch (code1)
4228 case EQ_EXPR: val = (cmp == 0); break;
4229 case NE_EXPR: val = (cmp != 0); break;
4230 case LT_EXPR: val = (cmp > 0); break;
4231 case GT_EXPR: val = (cmp < 0); break;
4232 case LE_EXPR: val = (cmp >= 0); break;
4233 case GE_EXPR: val = (cmp <= 0); break;
4234 default: done = false;
4236 if (done)
4238 if (val)
4239 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4240 else
4241 return boolean_false_node;
4245 /* Same business with inequality tests. */
4246 else if (code1 == NE_EXPR)
4248 bool val;
4249 switch (code2)
4251 case EQ_EXPR: val = (cmp != 0); break;
4252 case NE_EXPR: val = (cmp == 0); break;
4253 case LT_EXPR: val = (cmp >= 0); break;
4254 case GT_EXPR: val = (cmp <= 0); break;
4255 case LE_EXPR: val = (cmp > 0); break;
4256 case GE_EXPR: val = (cmp < 0); break;
4257 default:
4258 val = false;
4260 if (val)
4261 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4263 else if (code2 == NE_EXPR)
4265 bool val;
4266 switch (code1)
4268 case EQ_EXPR: val = (cmp == 0); break;
4269 case NE_EXPR: val = (cmp != 0); break;
4270 case LT_EXPR: val = (cmp <= 0); break;
4271 case GT_EXPR: val = (cmp >= 0); break;
4272 case LE_EXPR: val = (cmp < 0); break;
4273 case GE_EXPR: val = (cmp > 0); break;
4274 default:
4275 val = false;
4277 if (val)
4278 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4281 /* Chose the more restrictive of two < or <= comparisons. */
4282 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4283 && (code2 == LT_EXPR || code2 == LE_EXPR))
4285 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4286 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4287 else
4288 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4291 /* Likewise chose the more restrictive of two > or >= comparisons. */
4292 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4293 && (code2 == GT_EXPR || code2 == GE_EXPR))
4295 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4296 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4297 else
4298 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4301 /* Check for singleton ranges. */
4302 else if (cmp == 0
4303 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4304 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4305 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4307 /* Check for disjoint ranges. */
4308 else if (cmp <= 0
4309 && (code1 == LT_EXPR || code1 == LE_EXPR)
4310 && (code2 == GT_EXPR || code2 == GE_EXPR))
4311 return boolean_false_node;
4312 else if (cmp >= 0
4313 && (code1 == GT_EXPR || code1 == GE_EXPR)
4314 && (code2 == LT_EXPR || code2 == LE_EXPR))
4315 return boolean_false_node;
4318 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4319 NAME's definition is a truth value. See if there are any simplifications
4320 that can be done against the NAME's definition. */
4321 if (TREE_CODE (op1a) == SSA_NAME
4322 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4323 && (integer_zerop (op1b) || integer_onep (op1b)))
4325 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4326 || (code1 == NE_EXPR && integer_onep (op1b)));
4327 gimple stmt = SSA_NAME_DEF_STMT (op1a);
4328 switch (gimple_code (stmt))
4330 case GIMPLE_ASSIGN:
4331 /* Try to simplify by copy-propagating the definition. */
4332 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4334 case GIMPLE_PHI:
4335 /* If every argument to the PHI produces the same result when
4336 ANDed with the second comparison, we win.
4337 Do not do this unless the type is bool since we need a bool
4338 result here anyway. */
4339 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4341 tree result = NULL_TREE;
4342 unsigned i;
4343 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4345 tree arg = gimple_phi_arg_def (stmt, i);
4347 /* If this PHI has itself as an argument, ignore it.
4348 If all the other args produce the same result,
4349 we're still OK. */
4350 if (arg == gimple_phi_result (stmt))
4351 continue;
4352 else if (TREE_CODE (arg) == INTEGER_CST)
4354 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4356 if (!result)
4357 result = boolean_false_node;
4358 else if (!integer_zerop (result))
4359 return NULL_TREE;
4361 else if (!result)
4362 result = fold_build2 (code2, boolean_type_node,
4363 op2a, op2b);
4364 else if (!same_bool_comparison_p (result,
4365 code2, op2a, op2b))
4366 return NULL_TREE;
4368 else if (TREE_CODE (arg) == SSA_NAME
4369 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4371 tree temp;
4372 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
4373 /* In simple cases we can look through PHI nodes,
4374 but we have to be careful with loops.
4375 See PR49073. */
4376 if (! dom_info_available_p (CDI_DOMINATORS)
4377 || gimple_bb (def_stmt) == gimple_bb (stmt)
4378 || dominated_by_p (CDI_DOMINATORS,
4379 gimple_bb (def_stmt),
4380 gimple_bb (stmt)))
4381 return NULL_TREE;
4382 temp = and_var_with_comparison (arg, invert, code2,
4383 op2a, op2b);
4384 if (!temp)
4385 return NULL_TREE;
4386 else if (!result)
4387 result = temp;
4388 else if (!same_bool_result_p (result, temp))
4389 return NULL_TREE;
4391 else
4392 return NULL_TREE;
4394 return result;
4397 default:
4398 break;
4401 return NULL_TREE;
4404 /* Try to simplify the AND of two comparisons, specified by
4405 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4406 If this can be simplified to a single expression (without requiring
4407 introducing more SSA variables to hold intermediate values),
4408 return the resulting tree. Otherwise return NULL_TREE.
4409 If the result expression is non-null, it has boolean type. */
4411 tree
4412 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4413 enum tree_code code2, tree op2a, tree op2b)
4415 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4416 if (t)
4417 return t;
4418 else
4419 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4422 /* Helper function for or_comparisons_1: try to simplify the OR of the
4423 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4424 If INVERT is true, invert the value of VAR before doing the OR.
4425 Return NULL_EXPR if we can't simplify this to a single expression. */
4427 static tree
4428 or_var_with_comparison (tree var, bool invert,
4429 enum tree_code code2, tree op2a, tree op2b)
4431 tree t;
4432 gimple stmt = SSA_NAME_DEF_STMT (var);
4434 /* We can only deal with variables whose definitions are assignments. */
4435 if (!is_gimple_assign (stmt))
4436 return NULL_TREE;
4438 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4439 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4440 Then we only have to consider the simpler non-inverted cases. */
4441 if (invert)
4442 t = and_var_with_comparison_1 (stmt,
4443 invert_tree_comparison (code2, false),
4444 op2a, op2b);
4445 else
4446 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4447 return canonicalize_bool (t, invert);
4450 /* Try to simplify the OR of the ssa variable defined by the assignment
4451 STMT with the comparison specified by (OP2A CODE2 OP2B).
4452 Return NULL_EXPR if we can't simplify this to a single expression. */
4454 static tree
4455 or_var_with_comparison_1 (gimple stmt,
4456 enum tree_code code2, tree op2a, tree op2b)
4458 tree var = gimple_assign_lhs (stmt);
4459 tree true_test_var = NULL_TREE;
4460 tree false_test_var = NULL_TREE;
4461 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4463 /* Check for identities like (var OR (var != 0)) => true . */
4464 if (TREE_CODE (op2a) == SSA_NAME
4465 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4467 if ((code2 == NE_EXPR && integer_zerop (op2b))
4468 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4470 true_test_var = op2a;
4471 if (var == true_test_var)
4472 return var;
4474 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4475 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4477 false_test_var = op2a;
4478 if (var == false_test_var)
4479 return boolean_true_node;
4483 /* If the definition is a comparison, recurse on it. */
4484 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4486 tree t = or_comparisons_1 (innercode,
4487 gimple_assign_rhs1 (stmt),
4488 gimple_assign_rhs2 (stmt),
4489 code2,
4490 op2a,
4491 op2b);
4492 if (t)
4493 return t;
4496 /* If the definition is an AND or OR expression, we may be able to
4497 simplify by reassociating. */
4498 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4499 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4501 tree inner1 = gimple_assign_rhs1 (stmt);
4502 tree inner2 = gimple_assign_rhs2 (stmt);
4503 gimple s;
4504 tree t;
4505 tree partial = NULL_TREE;
4506 bool is_or = (innercode == BIT_IOR_EXPR);
4508 /* Check for boolean identities that don't require recursive examination
4509 of inner1/inner2:
4510 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4511 inner1 OR (inner1 AND inner2) => inner1
4512 !inner1 OR (inner1 OR inner2) => true
4513 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4515 if (inner1 == true_test_var)
4516 return (is_or ? var : inner1);
4517 else if (inner2 == true_test_var)
4518 return (is_or ? var : inner2);
4519 else if (inner1 == false_test_var)
4520 return (is_or
4521 ? boolean_true_node
4522 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4523 else if (inner2 == false_test_var)
4524 return (is_or
4525 ? boolean_true_node
4526 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4528 /* Next, redistribute/reassociate the OR across the inner tests.
4529 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4530 if (TREE_CODE (inner1) == SSA_NAME
4531 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4532 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4533 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4534 gimple_assign_rhs1 (s),
4535 gimple_assign_rhs2 (s),
4536 code2, op2a, op2b)))
4538 /* Handle the OR case, where we are reassociating:
4539 (inner1 OR inner2) OR (op2a code2 op2b)
4540 => (t OR inner2)
4541 If the partial result t is a constant, we win. Otherwise
4542 continue on to try reassociating with the other inner test. */
4543 if (is_or)
4545 if (integer_onep (t))
4546 return boolean_true_node;
4547 else if (integer_zerop (t))
4548 return inner2;
4551 /* Handle the AND case, where we are redistributing:
4552 (inner1 AND inner2) OR (op2a code2 op2b)
4553 => (t AND (inner2 OR (op2a code op2b))) */
4554 else if (integer_zerop (t))
4555 return boolean_false_node;
4557 /* Save partial result for later. */
4558 partial = t;
4561 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4562 if (TREE_CODE (inner2) == SSA_NAME
4563 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4564 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4565 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4566 gimple_assign_rhs1 (s),
4567 gimple_assign_rhs2 (s),
4568 code2, op2a, op2b)))
4570 /* Handle the OR case, where we are reassociating:
4571 (inner1 OR inner2) OR (op2a code2 op2b)
4572 => (inner1 OR t)
4573 => (t OR partial) */
4574 if (is_or)
4576 if (integer_zerop (t))
4577 return inner1;
4578 else if (integer_onep (t))
4579 return boolean_true_node;
4580 /* If both are the same, we can apply the identity
4581 (x OR x) == x. */
4582 else if (partial && same_bool_result_p (t, partial))
4583 return t;
4586 /* Handle the AND case, where we are redistributing:
4587 (inner1 AND inner2) OR (op2a code2 op2b)
4588 => (t AND (inner1 OR (op2a code2 op2b)))
4589 => (t AND partial) */
4590 else
4592 if (integer_zerop (t))
4593 return boolean_false_node;
4594 else if (partial)
4596 /* We already got a simplification for the other
4597 operand to the redistributed AND expression. The
4598 interesting case is when at least one is true.
4599 Or, if both are the same, we can apply the identity
4600 (x AND x) == x. */
4601 if (integer_onep (partial))
4602 return t;
4603 else if (integer_onep (t))
4604 return partial;
4605 else if (same_bool_result_p (t, partial))
4606 return t;
4611 return NULL_TREE;
4614 /* Try to simplify the OR of two comparisons defined by
4615 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4616 If this can be done without constructing an intermediate value,
4617 return the resulting tree; otherwise NULL_TREE is returned.
4618 This function is deliberately asymmetric as it recurses on SSA_DEFs
4619 in the first comparison but not the second. */
4621 static tree
4622 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4623 enum tree_code code2, tree op2a, tree op2b)
4625 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4627 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4628 if (operand_equal_p (op1a, op2a, 0)
4629 && operand_equal_p (op1b, op2b, 0))
4631 /* Result will be either NULL_TREE, or a combined comparison. */
4632 tree t = combine_comparisons (UNKNOWN_LOCATION,
4633 TRUTH_ORIF_EXPR, code1, code2,
4634 truth_type, op1a, op1b);
4635 if (t)
4636 return t;
4639 /* Likewise the swapped case of the above. */
4640 if (operand_equal_p (op1a, op2b, 0)
4641 && operand_equal_p (op1b, op2a, 0))
4643 /* Result will be either NULL_TREE, or a combined comparison. */
4644 tree t = combine_comparisons (UNKNOWN_LOCATION,
4645 TRUTH_ORIF_EXPR, code1,
4646 swap_tree_comparison (code2),
4647 truth_type, op1a, op1b);
4648 if (t)
4649 return t;
4652 /* If both comparisons are of the same value against constants, we might
4653 be able to merge them. */
4654 if (operand_equal_p (op1a, op2a, 0)
4655 && TREE_CODE (op1b) == INTEGER_CST
4656 && TREE_CODE (op2b) == INTEGER_CST)
4658 int cmp = tree_int_cst_compare (op1b, op2b);
4660 /* If we have (op1a != op1b), we should either be able to
4661 return that or TRUE, depending on whether the constant op1b
4662 also satisfies the other comparison against op2b. */
4663 if (code1 == NE_EXPR)
4665 bool done = true;
4666 bool val;
4667 switch (code2)
4669 case EQ_EXPR: val = (cmp == 0); break;
4670 case NE_EXPR: val = (cmp != 0); break;
4671 case LT_EXPR: val = (cmp < 0); break;
4672 case GT_EXPR: val = (cmp > 0); break;
4673 case LE_EXPR: val = (cmp <= 0); break;
4674 case GE_EXPR: val = (cmp >= 0); break;
4675 default: done = false;
4677 if (done)
4679 if (val)
4680 return boolean_true_node;
4681 else
4682 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4685 /* Likewise if the second comparison is a != comparison. */
4686 else if (code2 == NE_EXPR)
4688 bool done = true;
4689 bool val;
4690 switch (code1)
4692 case EQ_EXPR: val = (cmp == 0); break;
4693 case NE_EXPR: val = (cmp != 0); break;
4694 case LT_EXPR: val = (cmp > 0); break;
4695 case GT_EXPR: val = (cmp < 0); break;
4696 case LE_EXPR: val = (cmp >= 0); break;
4697 case GE_EXPR: val = (cmp <= 0); break;
4698 default: done = false;
4700 if (done)
4702 if (val)
4703 return boolean_true_node;
4704 else
4705 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4709 /* See if an equality test is redundant with the other comparison. */
4710 else if (code1 == EQ_EXPR)
4712 bool val;
4713 switch (code2)
4715 case EQ_EXPR: val = (cmp == 0); break;
4716 case NE_EXPR: val = (cmp != 0); break;
4717 case LT_EXPR: val = (cmp < 0); break;
4718 case GT_EXPR: val = (cmp > 0); break;
4719 case LE_EXPR: val = (cmp <= 0); break;
4720 case GE_EXPR: val = (cmp >= 0); break;
4721 default:
4722 val = false;
4724 if (val)
4725 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4727 else if (code2 == EQ_EXPR)
4729 bool val;
4730 switch (code1)
4732 case EQ_EXPR: val = (cmp == 0); break;
4733 case NE_EXPR: val = (cmp != 0); break;
4734 case LT_EXPR: val = (cmp > 0); break;
4735 case GT_EXPR: val = (cmp < 0); break;
4736 case LE_EXPR: val = (cmp >= 0); break;
4737 case GE_EXPR: val = (cmp <= 0); break;
4738 default:
4739 val = false;
4741 if (val)
4742 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4745 /* Chose the less restrictive of two < or <= comparisons. */
4746 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4747 && (code2 == LT_EXPR || code2 == LE_EXPR))
4749 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4750 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4751 else
4752 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4755 /* Likewise chose the less restrictive of two > or >= comparisons. */
4756 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4757 && (code2 == GT_EXPR || code2 == GE_EXPR))
4759 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4760 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4761 else
4762 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4765 /* Check for singleton ranges. */
4766 else if (cmp == 0
4767 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4768 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4769 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4771 /* Check for less/greater pairs that don't restrict the range at all. */
4772 else if (cmp >= 0
4773 && (code1 == LT_EXPR || code1 == LE_EXPR)
4774 && (code2 == GT_EXPR || code2 == GE_EXPR))
4775 return boolean_true_node;
4776 else if (cmp <= 0
4777 && (code1 == GT_EXPR || code1 == GE_EXPR)
4778 && (code2 == LT_EXPR || code2 == LE_EXPR))
4779 return boolean_true_node;
4782 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4783 NAME's definition is a truth value. See if there are any simplifications
4784 that can be done against the NAME's definition. */
4785 if (TREE_CODE (op1a) == SSA_NAME
4786 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4787 && (integer_zerop (op1b) || integer_onep (op1b)))
4789 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4790 || (code1 == NE_EXPR && integer_onep (op1b)));
4791 gimple stmt = SSA_NAME_DEF_STMT (op1a);
4792 switch (gimple_code (stmt))
4794 case GIMPLE_ASSIGN:
4795 /* Try to simplify by copy-propagating the definition. */
4796 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4798 case GIMPLE_PHI:
4799 /* If every argument to the PHI produces the same result when
4800 ORed with the second comparison, we win.
4801 Do not do this unless the type is bool since we need a bool
4802 result here anyway. */
4803 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4805 tree result = NULL_TREE;
4806 unsigned i;
4807 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4809 tree arg = gimple_phi_arg_def (stmt, i);
4811 /* If this PHI has itself as an argument, ignore it.
4812 If all the other args produce the same result,
4813 we're still OK. */
4814 if (arg == gimple_phi_result (stmt))
4815 continue;
4816 else if (TREE_CODE (arg) == INTEGER_CST)
4818 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4820 if (!result)
4821 result = boolean_true_node;
4822 else if (!integer_onep (result))
4823 return NULL_TREE;
4825 else if (!result)
4826 result = fold_build2 (code2, boolean_type_node,
4827 op2a, op2b);
4828 else if (!same_bool_comparison_p (result,
4829 code2, op2a, op2b))
4830 return NULL_TREE;
4832 else if (TREE_CODE (arg) == SSA_NAME
4833 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4835 tree temp;
4836 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
4837 /* In simple cases we can look through PHI nodes,
4838 but we have to be careful with loops.
4839 See PR49073. */
4840 if (! dom_info_available_p (CDI_DOMINATORS)
4841 || gimple_bb (def_stmt) == gimple_bb (stmt)
4842 || dominated_by_p (CDI_DOMINATORS,
4843 gimple_bb (def_stmt),
4844 gimple_bb (stmt)))
4845 return NULL_TREE;
4846 temp = or_var_with_comparison (arg, invert, code2,
4847 op2a, op2b);
4848 if (!temp)
4849 return NULL_TREE;
4850 else if (!result)
4851 result = temp;
4852 else if (!same_bool_result_p (result, temp))
4853 return NULL_TREE;
4855 else
4856 return NULL_TREE;
4858 return result;
4861 default:
4862 break;
4865 return NULL_TREE;
4868 /* Try to simplify the OR of two comparisons, specified by
4869 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4870 If this can be simplified to a single expression (without requiring
4871 introducing more SSA variables to hold intermediate values),
4872 return the resulting tree. Otherwise return NULL_TREE.
4873 If the result expression is non-null, it has boolean type. */
4875 tree
4876 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4877 enum tree_code code2, tree op2a, tree op2b)
4879 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4880 if (t)
4881 return t;
4882 else
4883 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4887 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4889 Either NULL_TREE, a simplified but non-constant or a constant
4890 is returned.
4892 ??? This should go into a gimple-fold-inline.h file to be eventually
4893 privatized with the single valueize function used in the various TUs
4894 to avoid the indirect function call overhead. */
4896 tree
4897 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
4898 tree (*gvalueize) (tree))
4900 code_helper rcode;
4901 tree ops[3] = {};
4902 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4903 edges if there are intermediate VARYING defs. For this reason
4904 do not follow SSA edges here even though SCCVN can technically
4905 just deal fine with that. */
4906 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize)
4907 && rcode.is_tree_code ()
4908 && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
4909 || ((tree_code) rcode) == ADDR_EXPR)
4910 && is_gimple_val (ops[0]))
4912 tree res = ops[0];
4913 if (dump_file && dump_flags & TDF_DETAILS)
4915 fprintf (dump_file, "Match-and-simplified ");
4916 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
4917 fprintf (dump_file, " to ");
4918 print_generic_expr (dump_file, res, 0);
4919 fprintf (dump_file, "\n");
4921 return res;
4924 location_t loc = gimple_location (stmt);
4925 switch (gimple_code (stmt))
4927 case GIMPLE_ASSIGN:
4929 enum tree_code subcode = gimple_assign_rhs_code (stmt);
4931 switch (get_gimple_rhs_class (subcode))
4933 case GIMPLE_SINGLE_RHS:
4935 tree rhs = gimple_assign_rhs1 (stmt);
4936 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
4938 if (TREE_CODE (rhs) == SSA_NAME)
4940 /* If the RHS is an SSA_NAME, return its known constant value,
4941 if any. */
4942 return (*valueize) (rhs);
4944 /* Handle propagating invariant addresses into address
4945 operations. */
4946 else if (TREE_CODE (rhs) == ADDR_EXPR
4947 && !is_gimple_min_invariant (rhs))
4949 HOST_WIDE_INT offset = 0;
4950 tree base;
4951 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
4952 &offset,
4953 valueize);
4954 if (base
4955 && (CONSTANT_CLASS_P (base)
4956 || decl_address_invariant_p (base)))
4957 return build_invariant_address (TREE_TYPE (rhs),
4958 base, offset);
4960 else if (TREE_CODE (rhs) == CONSTRUCTOR
4961 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
4962 && (CONSTRUCTOR_NELTS (rhs)
4963 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
4965 unsigned i;
4966 tree val, *vec;
4968 vec = XALLOCAVEC (tree,
4969 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
4970 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
4972 val = (*valueize) (val);
4973 if (TREE_CODE (val) == INTEGER_CST
4974 || TREE_CODE (val) == REAL_CST
4975 || TREE_CODE (val) == FIXED_CST)
4976 vec[i] = val;
4977 else
4978 return NULL_TREE;
4981 return build_vector (TREE_TYPE (rhs), vec);
4983 if (subcode == OBJ_TYPE_REF)
4985 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
4986 /* If callee is constant, we can fold away the wrapper. */
4987 if (is_gimple_min_invariant (val))
4988 return val;
4991 if (kind == tcc_reference)
4993 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
4994 || TREE_CODE (rhs) == REALPART_EXPR
4995 || TREE_CODE (rhs) == IMAGPART_EXPR)
4996 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4998 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4999 return fold_unary_loc (EXPR_LOCATION (rhs),
5000 TREE_CODE (rhs),
5001 TREE_TYPE (rhs), val);
5003 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5004 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5006 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5007 return fold_ternary_loc (EXPR_LOCATION (rhs),
5008 TREE_CODE (rhs),
5009 TREE_TYPE (rhs), val,
5010 TREE_OPERAND (rhs, 1),
5011 TREE_OPERAND (rhs, 2));
5013 else if (TREE_CODE (rhs) == MEM_REF
5014 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5016 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5017 if (TREE_CODE (val) == ADDR_EXPR
5018 && is_gimple_min_invariant (val))
5020 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5021 unshare_expr (val),
5022 TREE_OPERAND (rhs, 1));
5023 if (tem)
5024 rhs = tem;
5027 return fold_const_aggregate_ref_1 (rhs, valueize);
5029 else if (kind == tcc_declaration)
5030 return get_symbol_constant_value (rhs);
5031 return rhs;
5034 case GIMPLE_UNARY_RHS:
5035 return NULL_TREE;
5037 case GIMPLE_BINARY_RHS:
5039 /* Handle binary operators that can appear in GIMPLE form. */
5040 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5041 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5043 /* Translate &x + CST into an invariant form suitable for
5044 further propagation. */
5045 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
5046 && TREE_CODE (op0) == ADDR_EXPR
5047 && TREE_CODE (op1) == INTEGER_CST)
5049 tree off = fold_convert (ptr_type_node, op1);
5050 return build_fold_addr_expr_loc
5051 (loc,
5052 fold_build2 (MEM_REF,
5053 TREE_TYPE (TREE_TYPE (op0)),
5054 unshare_expr (op0), off));
5057 return fold_binary_loc (loc, subcode,
5058 gimple_expr_type (stmt), op0, op1);
5061 case GIMPLE_TERNARY_RHS:
5063 /* Handle ternary operators that can appear in GIMPLE form. */
5064 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5065 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5066 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5068 /* Fold embedded expressions in ternary codes. */
5069 if ((subcode == COND_EXPR
5070 || subcode == VEC_COND_EXPR)
5071 && COMPARISON_CLASS_P (op0))
5073 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
5074 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
5075 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
5076 TREE_TYPE (op0), op00, op01);
5077 if (tem)
5078 op0 = tem;
5081 return fold_ternary_loc (loc, subcode,
5082 gimple_expr_type (stmt), op0, op1, op2);
5085 default:
5086 gcc_unreachable ();
5090 case GIMPLE_CALL:
5092 tree fn;
5093 gcall *call_stmt = as_a <gcall *> (stmt);
5095 if (gimple_call_internal_p (stmt))
5097 enum tree_code subcode = ERROR_MARK;
5098 switch (gimple_call_internal_fn (stmt))
5100 case IFN_UBSAN_CHECK_ADD:
5101 subcode = PLUS_EXPR;
5102 break;
5103 case IFN_UBSAN_CHECK_SUB:
5104 subcode = MINUS_EXPR;
5105 break;
5106 case IFN_UBSAN_CHECK_MUL:
5107 subcode = MULT_EXPR;
5108 break;
5109 default:
5110 return NULL_TREE;
5112 tree arg0 = gimple_call_arg (stmt, 0);
5113 tree arg1 = gimple_call_arg (stmt, 1);
5114 tree op0 = (*valueize) (arg0);
5115 tree op1 = (*valueize) (arg1);
5117 if (TREE_CODE (op0) != INTEGER_CST
5118 || TREE_CODE (op1) != INTEGER_CST)
5120 switch (subcode)
5122 case MULT_EXPR:
5123 /* x * 0 = 0 * x = 0 without overflow. */
5124 if (integer_zerop (op0) || integer_zerop (op1))
5125 return build_zero_cst (TREE_TYPE (arg0));
5126 break;
5127 case MINUS_EXPR:
5128 /* y - y = 0 without overflow. */
5129 if (operand_equal_p (op0, op1, 0))
5130 return build_zero_cst (TREE_TYPE (arg0));
5131 break;
5132 default:
5133 break;
5136 tree res
5137 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5138 if (res
5139 && TREE_CODE (res) == INTEGER_CST
5140 && !TREE_OVERFLOW (res))
5141 return res;
5142 return NULL_TREE;
5145 fn = (*valueize) (gimple_call_fn (stmt));
5146 if (TREE_CODE (fn) == ADDR_EXPR
5147 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5148 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5149 && gimple_builtin_call_types_compatible_p (stmt,
5150 TREE_OPERAND (fn, 0)))
5152 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5153 tree retval;
5154 unsigned i;
5155 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5156 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5157 retval = fold_builtin_call_array (loc,
5158 gimple_call_return_type (call_stmt),
5159 fn, gimple_call_num_args (stmt), args);
5160 if (retval)
5162 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5163 STRIP_NOPS (retval);
5164 retval = fold_convert (gimple_call_return_type (call_stmt),
5165 retval);
5167 return retval;
5169 return NULL_TREE;
5172 default:
5173 return NULL_TREE;
5177 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5178 Returns NULL_TREE if folding to a constant is not possible, otherwise
5179 returns a constant according to is_gimple_min_invariant. */
5181 tree
5182 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
5184 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5185 if (res && is_gimple_min_invariant (res))
5186 return res;
5187 return NULL_TREE;
5191 /* The following set of functions are supposed to fold references using
5192 their constant initializers. */
5194 /* See if we can find constructor defining value of BASE.
5195 When we know the consructor with constant offset (such as
5196 base is array[40] and we do know constructor of array), then
5197 BIT_OFFSET is adjusted accordingly.
5199 As a special case, return error_mark_node when constructor
5200 is not explicitly available, but it is known to be zero
5201 such as 'static const int a;'. */
5202 static tree
5203 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5204 tree (*valueize)(tree))
5206 HOST_WIDE_INT bit_offset2, size, max_size;
5207 if (TREE_CODE (base) == MEM_REF)
5209 if (!integer_zerop (TREE_OPERAND (base, 1)))
5211 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
5212 return NULL_TREE;
5213 *bit_offset += (mem_ref_offset (base).to_short_addr ()
5214 * BITS_PER_UNIT);
5217 if (valueize
5218 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5219 base = valueize (TREE_OPERAND (base, 0));
5220 if (!base || TREE_CODE (base) != ADDR_EXPR)
5221 return NULL_TREE;
5222 base = TREE_OPERAND (base, 0);
5225 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5226 DECL_INITIAL. If BASE is a nested reference into another
5227 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5228 the inner reference. */
5229 switch (TREE_CODE (base))
5231 case VAR_DECL:
5232 case CONST_DECL:
5234 tree init = ctor_for_folding (base);
5236 /* Our semantic is exact opposite of ctor_for_folding;
5237 NULL means unknown, while error_mark_node is 0. */
5238 if (init == error_mark_node)
5239 return NULL_TREE;
5240 if (!init)
5241 return error_mark_node;
5242 return init;
5245 case ARRAY_REF:
5246 case COMPONENT_REF:
5247 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
5248 if (max_size == -1 || size != max_size)
5249 return NULL_TREE;
5250 *bit_offset += bit_offset2;
5251 return get_base_constructor (base, bit_offset, valueize);
5253 case STRING_CST:
5254 case CONSTRUCTOR:
5255 return base;
5257 default:
5258 return NULL_TREE;
5262 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5263 SIZE to the memory at bit OFFSET.  FROM_DECL is passed through to
     fold_ctor_reference when folding the matched element.  Returns the folded
     constant, a zero constant when the accessed memory is not explicitly
     initialized, or NULL_TREE when the access cannot be folded.  */
5265 static tree
5266 fold_array_ctor_reference (tree type, tree ctor,
5267 unsigned HOST_WIDE_INT offset,
5268 unsigned HOST_WIDE_INT size,
5269 tree from_decl)
5271 unsigned HOST_WIDE_INT cnt;
5272 tree cfield, cval;
5273 offset_int low_bound;
5274 offset_int elt_size;
5275 offset_int index, max_index;
5276 offset_int access_index;
5277 tree domain_type = NULL_TREE, index_type = NULL_TREE;
5278 HOST_WIDE_INT inner_offset;
5280 /* Compute low bound and elt size. */
5281 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
5282 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
5283 if (domain_type && TYPE_MIN_VALUE (domain_type))
5285 /* Static constructors for variably sized objects make no sense. */
5286 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
5287 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
5288 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
5290 else
5291 low_bound = 0;
5292 /* Static constructors for variably sized objects make no sense. */
5293 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
5294 == INTEGER_CST);
5295 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
5297 /* We can handle only constantly sized accesses that are known to not
5298 be larger than size of array element. */
5299 if (!TYPE_SIZE_UNIT (type)
5300 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
5301 || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
5302 || elt_size == 0)
5303 return NULL_TREE;
5305 /* Compute the array index we look for. */
5306 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
5307 elt_size);
5308 access_index += low_bound;
5309 if (index_type)
5310 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
5311 TYPE_SIGN (index_type));
5313 /* And offset within the access. */
5314 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
5316 /* See if the array field is large enough to span whole access. We do not
5317 care to fold accesses spanning multiple array indexes. */
5318 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
5319 return NULL_TREE;
5321 index = low_bound - 1;
5322 if (index_type)
5323 index = wi::ext (index, TYPE_PRECISION (index_type),
5324 TYPE_SIGN (index_type));
5326 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
5328 /* Array constructor might explicitly set index, or specify range
5329 or leave index NULL meaning that it is next index after previous
5330 one. */
5331 if (cfield)
5333 if (TREE_CODE (cfield) == INTEGER_CST)
5334 max_index = index = wi::to_offset (cfield);
5335 else
5337 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
5338 index = wi::to_offset (TREE_OPERAND (cfield, 0));
5339 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
5342 else
5344 index += 1;
5345 if (index_type)
5346 index = wi::ext (index, TYPE_PRECISION (index_type),
5347 TYPE_SIGN (index_type));
5348 max_index = index;
5351 /* Do we have match? */
5352 if (wi::cmpu (access_index, index) >= 0
5353 && wi::cmpu (access_index, max_index) <= 0)
5354 return fold_ctor_reference (type, cval, inner_offset, size,
5355 from_decl);
5357 /* When memory is not explicitly mentioned in constructor,
5358 it is 0 (or out of range). */
5359 return build_zero_cst (type);
5362 /* CTOR is CONSTRUCTOR of an aggregate or vector.
5363 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.
     FROM_DECL is passed through to fold_ctor_reference.  Returns the folded
     constant, a zero constant for uninitialized memory, or NULL_TREE when
     the access cannot be folded (e.g. it spans multiple fields).  */
5365 static tree
5366 fold_nonarray_ctor_reference (tree type, tree ctor,
5367 unsigned HOST_WIDE_INT offset,
5368 unsigned HOST_WIDE_INT size,
5369 tree from_decl)
5371 unsigned HOST_WIDE_INT cnt;
5372 tree cfield, cval;
5374 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5375 cval)
5377 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5378 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5379 tree field_size = DECL_SIZE (cfield);
5380 offset_int bitoffset;
5381 offset_int bitoffset_end, access_end;
5383 /* Variable sized objects in static constructors make no sense,
5384 but field_size can be NULL for flexible array members. */
5385 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5386 && TREE_CODE (byte_offset) == INTEGER_CST
5387 && (field_size != NULL_TREE
5388 ? TREE_CODE (field_size) == INTEGER_CST
5389 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
5391 /* Compute bit offset of the field. */
5392 bitoffset = (wi::to_offset (field_offset)
5393 + wi::lshift (wi::to_offset (byte_offset),
5394 LOG2_BITS_PER_UNIT));
5395 /* Compute bit offset where the field ends. */
5396 if (field_size != NULL_TREE)
5397 bitoffset_end = bitoffset + wi::to_offset (field_size);
5398 else
5399 bitoffset_end = 0;
5401 access_end = offset_int (offset) + size;
5403 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5404 [BITOFFSET, BITOFFSET_END)? */
5405 if (wi::cmps (access_end, bitoffset) > 0
5406 && (field_size == NULL_TREE
5407 || wi::lts_p (offset, bitoffset_end)))
5409 offset_int inner_offset = offset_int (offset) - bitoffset;
5410 /* We do have overlap. Now see if field is large enough to
5411 cover the access. Give up for accesses spanning multiple
5412 fields. */
5413 if (wi::cmps (access_end, bitoffset_end) > 0)
5414 return NULL_TREE;
5415 if (wi::lts_p (offset, bitoffset))
5416 return NULL_TREE;
5417 return fold_ctor_reference (type, cval,
5418 inner_offset.to_uhwi (), size,
5419 from_decl);
5422 /* When memory is not explicitly mentioned in constructor, it is 0. */
5423 return build_zero_cst (type);
5426 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5427 to the memory at bit OFFSET.  FROM_DECL is the declaration the CTOR
     originates from, used when canonicalizing constructor values.
     Returns the folded constant or NULL_TREE on failure.  */
5429 tree
5430 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
5431 unsigned HOST_WIDE_INT size, tree from_decl)
5433 tree ret;
5435 /* We found the field with exact match. */
5436 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5437 && !offset)
5438 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5440 /* We are at the end of walk, see if we can view convert the
5441 result. */
5442 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5443 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
5444 && !compare_tree_int (TYPE_SIZE (type), size)
5445 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
5447 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5448 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5449 if (ret)
5450 STRIP_USELESS_TYPE_CONVERSION (ret);
5451 return ret;
5453 /* For constants and byte-aligned/sized reads try to go through
5454 native_encode/interpret. */
5455 if (CONSTANT_CLASS_P (ctor)
5456 && BITS_PER_UNIT == 8
5457 && offset % BITS_PER_UNIT == 0
5458 && size % BITS_PER_UNIT == 0
5459 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5461 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
5462 if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5463 offset / BITS_PER_UNIT) > 0)
5464 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
5466 if (TREE_CODE (ctor) == CONSTRUCTOR)
5469 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5470 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
5471 return fold_array_ctor_reference (type, ctor, offset, size,
5472 from_decl);
5473 else
5474 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5475 from_decl);
5478 return NULL_TREE;
5481 /* Return the tree representing the element referenced by T if T is an
5482 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
5483 names using VALUEIZE. Return NULL_TREE otherwise. */
5485 tree
5486 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5488 tree ctor, idx, base;
5489 HOST_WIDE_INT offset, size, max_size;
5490 tree tem;
5492 if (TREE_THIS_VOLATILE (t))
5493 return NULL_TREE;
5495 if (DECL_P (t))
5496 return get_symbol_constant_value (t);
5498 tem = fold_read_from_constant_string (t);
5499 if (tem)
5500 return tem;
5502 switch (TREE_CODE (t))
5504 case ARRAY_REF:
5505 case ARRAY_RANGE_REF:
5506 /* Constant indexes are handled well by get_base_constructor.
5507 Only special case variable offsets.
5508 FIXME: This code can't handle nested references with variable indexes
5509 (they will be handled only by iteration of ccp). Perhaps we can bring
5510 get_ref_base_and_extent here and make it use a valueize callback. */
5511 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5512 && valueize
5513 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
5514 && TREE_CODE (idx) == INTEGER_CST)
5516 tree low_bound, unit_size;
5518 /* If the resulting bit-offset is constant, track it. */
5519 if ((low_bound = array_ref_low_bound (t),
5520 TREE_CODE (low_bound) == INTEGER_CST)
5521 && (unit_size = array_ref_element_size (t),
5522 tree_fits_uhwi_p (unit_size)))
5524 offset_int woffset
5525 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5526 TYPE_PRECISION (TREE_TYPE (idx)));
5528 if (wi::fits_shwi_p (woffset))
5530 offset = woffset.to_shwi ();
5531 /* TODO: This code seems wrong, multiply then check
5532 to see if it fits. */
5533 offset *= tree_to_uhwi (unit_size);
5534 offset *= BITS_PER_UNIT;
5536 base = TREE_OPERAND (t, 0);
5537 ctor = get_base_constructor (base, &offset, valueize);
5538 /* Empty constructor. Always fold to 0. */
5539 if (ctor == error_mark_node)
5540 return build_zero_cst (TREE_TYPE (t));
5541 /* Out of bound array access. Value is undefined,
5542 but don't fold. */
5543 if (offset < 0)
5544 return NULL_TREE;
5545 /* We can not determine ctor. */
5546 if (!ctor)
5547 return NULL_TREE;
5548 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5549 tree_to_uhwi (unit_size)
5550 * BITS_PER_UNIT,
5551 base);
5555 /* Fallthru. */
5557 case COMPONENT_REF:
5558 case BIT_FIELD_REF:
5559 case TARGET_MEM_REF:
5560 case MEM_REF:
5561 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
5562 ctor = get_base_constructor (base, &offset, valueize);
5564 /* Empty constructor. Always fold to 0. */
5565 if (ctor == error_mark_node)
5566 return build_zero_cst (TREE_TYPE (t));
5567 /* We do not know precise address. */
5568 if (max_size == -1 || max_size != size)
5569 return NULL_TREE;
5570 /* We can not determine ctor. */
5571 if (!ctor)
5572 return NULL_TREE;
5574 /* Out of bound array access. Value is undefined, but don't fold. */
5575 if (offset < 0)
5576 return NULL_TREE;
5578 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5579 base);
5581 case REALPART_EXPR:
5582 case IMAGPART_EXPR:
5584 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5585 if (c && TREE_CODE (c) == COMPLEX_CST)
5586 return fold_build1_loc (EXPR_LOCATION (t),
5587 TREE_CODE (t), TREE_TYPE (t), c);
5588 break;
5591 default:
5592 break;
5595 return NULL_TREE;
/* Like fold_const_aggregate_ref_1, but without valueization of SSA names.  */
5598 tree
5599 fold_const_aggregate_ref (tree t)
5601 return fold_const_aggregate_ref_1 (t, NULL);
5604 /* Lookup virtual method with index TOKEN in a virtual table V
5605 at OFFSET.
5606 Set CAN_REFER if non-NULL to false if method
5607 is not referable or if the virtual table is ill-formed (such as rewritten
5608 by non-C++ produced symbol). Otherwise just return NULL in that case. */
5610 tree
5611 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5612 tree v,
5613 unsigned HOST_WIDE_INT offset,
5614 bool *can_refer)
5616 tree vtable = v, init, fn;
5617 unsigned HOST_WIDE_INT size;
5618 unsigned HOST_WIDE_INT elt_size, access_index;
5619 tree domain_type;
5621 if (can_refer)
5622 *can_refer = true;
5624 /* First of all double check we have virtual table. */
5625 if (TREE_CODE (v) != VAR_DECL
5626 || !DECL_VIRTUAL_P (v))
5628 /* Pass down that we lost track of the target. */
5629 if (can_refer)
5630 *can_refer = false;
5631 return NULL_TREE;
5634 init = ctor_for_folding (v);
5636 /* The virtual tables should always be born with constructors
5637 and we always should assume that they are available for
5638 folding. At the moment we do not stream them in all cases,
5639 but it should never happen that ctor seem unreachable. */
5640 gcc_assert (init);
5641 if (init == error_mark_node)
5643 gcc_assert (in_lto_p);
5644 /* Pass down that we lost track of the target. */
5645 if (can_refer)
5646 *can_refer = false;
5647 return NULL_TREE;
5649 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
5650 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
5651 offset *= BITS_PER_UNIT;
5652 offset += token * size;
5654 /* Lookup the value in the constructor that is assumed to be array.
5655 This is equivalent to
5656 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5657 offset, size, NULL);
5658 but in a constant time. We expect that frontend produced a simple
5659 array without indexed initializers. */
5661 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5662 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5663 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5664 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5666 access_index = offset / BITS_PER_UNIT / elt_size;
5667 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5669 /* This code makes an assumption that there are no
5670 indexed fields produced by C++ FE, so we can directly index the array. */
5671 if (access_index < CONSTRUCTOR_NELTS (init))
5673 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5674 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5675 STRIP_NOPS (fn);
5677 else
5678 fn = NULL;
5680 /* For type inconsistent program we may end up looking up virtual method
5681 in virtual table that does not contain TOKEN entries. We may overrun
5682 the virtual table and pick up a constant or RTTI info pointer.
5683 In any case the call is undefined. */
5684 if (!fn
5685 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5686 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5687 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5688 else
5690 fn = TREE_OPERAND (fn, 0);
5692 /* When cgraph node is missing and function is not public, we cannot
5693 devirtualize. This can happen in WHOPR when the actual method
5694 ends up in other partition, because we found devirtualization
5695 possibility too late. */
5696 if (!can_refer_decl_in_current_unit_p (fn, vtable))
5698 if (can_refer)
5700 *can_refer = false;
5701 return fn;
5703 return NULL_TREE;
5707 /* Make sure we create a cgraph node for functions we'll reference.
5708 They can be non-existent if the reference comes from an entry
5709 of an external vtable for example. */
5710 cgraph_node::get_create (fn);
5712 return fn;
5715 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5716 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5717 KNOWN_BINFO carries the binfo describing the true type of
5718 OBJ_TYPE_REF_OBJECT(REF).
5719 Set CAN_REFER if non-NULL to false if method
5720 is not referable or if the virtual table is ill-formed (such as rewritten
5721 by non-C++ produced symbol). Otherwise just return NULL in that case. */
5723 tree
5724 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5725 bool *can_refer)
5727 unsigned HOST_WIDE_INT offset;
5728 tree v;
5730 v = BINFO_VTABLE (known_binfo);
5731 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5732 if (!v)
5733 return NULL_TREE;
5735 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5737 if (can_refer)
5738 *can_refer = false;
5739 return NULL_TREE;
5741 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
5744 /* Return true iff VAL is a gimple expression that is known to be
5745 non-negative. Restricted to floating-point inputs. */
5747 bool
5748 gimple_val_nonnegative_real_p (tree val)
5750 gimple def_stmt;
5752 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
5754 /* Use existing logic for non-gimple trees. */
5755 if (tree_expr_nonnegative_p (val))
5756 return true;
5758 if (TREE_CODE (val) != SSA_NAME)
5759 return false;
5761 /* Currently we look only at the immediately defining statement
5762 to make this determination, since recursion on defining
5763 statements of operands can lead to quadratic behavior in the
5764 worst case. This is expected to catch almost all occurrences
5765 in practice. It would be possible to implement limited-depth
5766 recursion if important cases are lost. Alternatively, passes
5767 that need this information (such as the pow/powi lowering code
5768 in the cse_sincos pass) could be revised to provide it through
5769 dataflow propagation. */
5771 def_stmt = SSA_NAME_DEF_STMT (val);
5773 if (is_gimple_assign (def_stmt))
5775 tree op0, op1;
5777 /* See fold-const.c:tree_expr_nonnegative_p for additional
5778 cases that could be handled with recursion. */
5780 switch (gimple_assign_rhs_code (def_stmt))
5782 case ABS_EXPR:
5783 /* Always true for floating-point operands. */
5784 return true;
5786 case MULT_EXPR:
5787 /* True if the two operands are identical (since we are
5788 restricted to floating-point inputs). */
5789 op0 = gimple_assign_rhs1 (def_stmt);
5790 op1 = gimple_assign_rhs2 (def_stmt);
5792 if (op0 == op1
5793 || operand_equal_p (op0, op1, 0))
5794 return true;
      /* FALLTHRU - operands differ, we cannot prove nonnegativity.  */
5796 default:
5797 return false;
5800 else if (is_gimple_call (def_stmt))
5802 tree fndecl = gimple_call_fndecl (def_stmt);
5803 if (fndecl
5804 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
5806 tree arg1;
5808 switch (DECL_FUNCTION_CODE (fndecl))
5810 CASE_FLT_FN (BUILT_IN_ACOS):
5811 CASE_FLT_FN (BUILT_IN_ACOSH):
5812 CASE_FLT_FN (BUILT_IN_CABS):
5813 CASE_FLT_FN (BUILT_IN_COSH):
5814 CASE_FLT_FN (BUILT_IN_ERFC):
5815 CASE_FLT_FN (BUILT_IN_EXP):
5816 CASE_FLT_FN (BUILT_IN_EXP10):
5817 CASE_FLT_FN (BUILT_IN_EXP2):
5818 CASE_FLT_FN (BUILT_IN_FABS):
5819 CASE_FLT_FN (BUILT_IN_FDIM):
5820 CASE_FLT_FN (BUILT_IN_HYPOT):
5821 CASE_FLT_FN (BUILT_IN_POW10):
5822 return true;
5824 CASE_FLT_FN (BUILT_IN_SQRT):
5825 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
5826 nonnegative inputs. */
5827 if (!HONOR_SIGNED_ZEROS (val))
5828 return true;
5830 break;
5832 CASE_FLT_FN (BUILT_IN_POWI):
5833 /* True if the second argument is an even integer. */
5834 arg1 = gimple_call_arg (def_stmt, 1);
5836 if (TREE_CODE (arg1) == INTEGER_CST
5837 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
5838 return true;
5840 break;
5842 CASE_FLT_FN (BUILT_IN_POW):
5843 /* True if the second argument is an even integer-valued
5844 real. */
5845 arg1 = gimple_call_arg (def_stmt, 1);
5847 if (TREE_CODE (arg1) == REAL_CST)
5849 REAL_VALUE_TYPE c;
5850 HOST_WIDE_INT n;
5852 c = TREE_REAL_CST (arg1);
5853 n = real_to_integer (&c);
5855 if ((n & 1) == 0)
5857 REAL_VALUE_TYPE cint;
5858 real_from_integer (&cint, VOIDmode, n, SIGNED);
5859 if (real_identical (&c, &cint))
5860 return true;
5864 break;
5866 default:
5867 return false;
5872 return false;
5875 /* Given a pointer value OP0, return a simplified version of an
5876 indirection through OP0, or NULL_TREE if no simplification is
5877 possible. Note that the resulting type may be different from
5878 the type pointed to in the sense that it is still compatible
5879 from the langhooks point of view. */
5881 tree
5882 gimple_fold_indirect_ref (tree t)
5884 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
5885 tree sub = t;
5886 tree subtype;
5888 STRIP_NOPS (sub);
5889 subtype = TREE_TYPE (sub);
5890 if (!POINTER_TYPE_P (subtype))
5891 return NULL_TREE;
5893 if (TREE_CODE (sub) == ADDR_EXPR)
5895 tree op = TREE_OPERAND (sub, 0);
5896 tree optype = TREE_TYPE (op);
5897 /* *&p => p */
5898 if (useless_type_conversion_p (type, optype))
5899 return op;
5901 /* *(foo *)&fooarray => fooarray[0] */
5902 if (TREE_CODE (optype) == ARRAY_TYPE
5903 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
5904 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5906 tree type_domain = TYPE_DOMAIN (optype);
5907 tree min_val = size_zero_node;
5908 if (type_domain && TYPE_MIN_VALUE (type_domain))
5909 min_val = TYPE_MIN_VALUE (type_domain);
5910 if (TREE_CODE (min_val) == INTEGER_CST)
5911 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
5913 /* *(foo *)&complexfoo => __real__ complexfoo */
5914 else if (TREE_CODE (optype) == COMPLEX_TYPE
5915 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5916 return fold_build1 (REALPART_EXPR, type, op);
5917 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
5918 else if (TREE_CODE (optype) == VECTOR_TYPE
5919 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5921 tree part_width = TYPE_SIZE (type);
5922 tree index = bitsize_int (0);
5923 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
5927 /* *(p + CST) -> ... */
5928 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
5929 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
5931 tree addr = TREE_OPERAND (sub, 0);
5932 tree off = TREE_OPERAND (sub, 1);
5933 tree addrtype;
5935 STRIP_NOPS (addr);
5936 addrtype = TREE_TYPE (addr);
5938 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
5939 if (TREE_CODE (addr) == ADDR_EXPR
5940 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
5941 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
5942 && tree_fits_uhwi_p (off))
5944 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
5945 tree part_width = TYPE_SIZE (type);
5946 unsigned HOST_WIDE_INT part_widthi
5947 = tree_to_shwi (part_width) / BITS_PER_UNIT;
5948 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
5949 tree index = bitsize_int (indexi);
           /* Only fold if the access lies within the vector.  */
5950 if (offset / part_widthi
5951 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
5952 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
5953 part_width, index);
5956 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5957 if (TREE_CODE (addr) == ADDR_EXPR
5958 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
5959 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
5961 tree size = TYPE_SIZE_UNIT (type);
5962 if (tree_int_cst_equal (size, off))
5963 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
5966 /* *(p + CST) -> MEM_REF <p, CST>. */
5967 if (TREE_CODE (addr) != ADDR_EXPR
5968 || DECL_P (TREE_OPERAND (addr, 0)))
5969 return fold_build2 (MEM_REF, type,
5970 addr,
5971 wide_int_to_tree (ptype, off));
5974 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
5975 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
5976 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
5977 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
5979 tree type_domain;
5980 tree min_val = size_zero_node;
5981 tree osub = sub;
5982 sub = gimple_fold_indirect_ref (sub);
5983 if (! sub)
5984 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
5985 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
5986 if (type_domain && TYPE_MIN_VALUE (type_domain))
5987 min_val = TYPE_MIN_VALUE (type_domain);
5988 if (TREE_CODE (min_val) == INTEGER_CST)
5989 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
5992 return NULL_TREE;
5995 /* Return true if CODE is an operation that when operating on signed
5996 integer types involves undefined behavior on overflow and the
5997 operation can be expressed with unsigned arithmetic. */
5999 bool
6000 arith_code_with_undefined_signed_overflow (tree_code code)
6002 switch (code)
6004 case PLUS_EXPR:
6005 case MINUS_EXPR:
6006 case MULT_EXPR:
6007 case NEGATE_EXPR:
6008 case POINTER_PLUS_EXPR:
6009 return true;
6010 default:
6011 return false;
6015 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
6016 operation that can be transformed to unsigned arithmetic by converting
6017 its operand, carrying out the operation in the corresponding unsigned
6018 type and converting the result back to the original type.
6020 Returns a sequence of statements that replace STMT and also contain
6021 a modified form of STMT itself. */
6023 gimple_seq
6024 rewrite_to_defined_overflow (gimple stmt)
6026 if (dump_file && (dump_flags & TDF_DETAILS))
6028 fprintf (dump_file, "rewriting stmt with undefined signed "
6029 "overflow ");
6030 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
     /* Convert each operand of STMT to the unsigned counterpart type,
        accumulating any statements that computes.  */
6033 tree lhs = gimple_assign_lhs (stmt);
6034 tree type = unsigned_type_for (TREE_TYPE (lhs));
6035 gimple_seq stmts = NULL;
6036 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
6038 gimple_seq stmts2 = NULL;
6039 gimple_set_op (stmt, i,
6040 force_gimple_operand (fold_convert (type,
6041 gimple_op (stmt, i)),
6042 &stmts2, true, NULL_TREE));
6043 gimple_seq_add_seq (&stmts, stmts2);
     /* Give STMT a fresh unsigned LHS; POINTER_PLUS becomes plain PLUS
        in the unsigned type.  */
6045 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
6046 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
6047 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
6048 gimple_seq_add_stmt (&stmts, stmt);
     /* Convert the unsigned result back to the original LHS type.  */
6049 gimple cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
6050 gimple_seq_add_stmt (&stmts, cvt);
6052 return stmts;
6056 /* The valueization hook we use for the gimple_build API simplification.
6057 This makes us match fold_buildN behavior by only combining with
6058 statements in the sequence(s) we are currently building. */
6060 static tree
6061 gimple_build_valueize (tree op)
6063 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6064 return op;
6065 return NULL_TREE;
6068 /* Build the expression CODE OP0 of type TYPE with location LOC,
6069 simplifying it first if possible. Returns the built
6070 expression value and appends statements possibly defining it
6071 to SEQ. */
6073 tree
6074 gimple_build (gimple_seq *seq, location_t loc,
6075 enum tree_code code, tree type, tree op0)
6077 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
6078 if (!res)
6080 if (gimple_in_ssa_p (cfun))
6081 res = make_ssa_name (type);
6082 else
6083 res = create_tmp_reg (type);
6084 gimple stmt;
       /* These codes take their operand wrapped in a GENERIC tree
          rather than as a plain gimple operand.  */
6085 if (code == REALPART_EXPR
6086 || code == IMAGPART_EXPR
6087 || code == VIEW_CONVERT_EXPR)
6088 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6089 else
6090 stmt = gimple_build_assign (res, code, op0);
6091 gimple_set_location (stmt, loc);
6092 gimple_seq_add_stmt_without_update (seq, stmt);
6094 return res;
6097 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6098 simplifying it first if possible. Returns the built
6099 expression value and appends statements possibly defining it
6100 to SEQ. */
6102 tree
6103 gimple_build (gimple_seq *seq, location_t loc,
6104 enum tree_code code, tree type, tree op0, tree op1)
6106 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6107 if (!res)
       /* No simplification possible; emit an explicit assignment.  */
6109 if (gimple_in_ssa_p (cfun))
6110 res = make_ssa_name (type);
6111 else
6112 res = create_tmp_reg (type);
6113 gimple stmt = gimple_build_assign (res, code, op0, op1);
6114 gimple_set_location (stmt, loc);
6115 gimple_seq_add_stmt_without_update (seq, stmt);
6117 return res;
6120 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6121 simplifying it first if possible. Returns the built
6122 expression value and appends statements possibly defining it
6123 to SEQ. */
6125 tree
6126 gimple_build (gimple_seq *seq, location_t loc,
6127 enum tree_code code, tree type, tree op0, tree op1, tree op2)
6129 tree res = gimple_simplify (code, type, op0, op1, op2,
6130 seq, gimple_build_valueize);
6131 if (!res)
6133 if (gimple_in_ssa_p (cfun))
6134 res = make_ssa_name (type);
6135 else
6136 res = create_tmp_reg (type);
6137 gimple stmt;
       /* BIT_FIELD_REF takes its operands wrapped in a GENERIC tree
          rather than as plain gimple operands.  */
6138 if (code == BIT_FIELD_REF)
6139 stmt = gimple_build_assign (res, code,
6140 build3 (code, type, op0, op1, op2));
6141 else
6142 stmt = gimple_build_assign (res, code, op0, op1, op2);
6143 gimple_set_location (stmt, loc);
6144 gimple_seq_add_stmt_without_update (seq, stmt);
6146 return res;
6149 /* Build the call FN (ARG0) with a result of type TYPE
6150 (or no result if TYPE is void) with location LOC,
6151 simplifying it first if possible. Returns the built
6152 expression value (or NULL_TREE if TYPE is void) and appends
6153 statements possibly defining it to SEQ. */
6155 tree
6156 gimple_build (gimple_seq *seq, location_t loc,
6157 enum built_in_function fn, tree type, tree arg0)
6159 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6160 if (!res)
       /* No simplification possible; emit an explicit call.  */
6162 tree decl = builtin_decl_implicit (fn);
6163 gimple stmt = gimple_build_call (decl, 1, arg0);
6164 if (!VOID_TYPE_P (type))
6166 if (gimple_in_ssa_p (cfun))
6167 res = make_ssa_name (type);
6168 else
6169 res = create_tmp_reg (type);
6170 gimple_call_set_lhs (stmt, res);
6172 gimple_set_location (stmt, loc);
6173 gimple_seq_add_stmt_without_update (seq, stmt);
6175 return res;
6178 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6179 (or no result if TYPE is void) with location LOC,
6180 simplifying it first if possible. Returns the built
6181 expression value (or NULL_TREE if TYPE is void) and appends
6182 statements possibly defining it to SEQ. */
6184 tree
6185 gimple_build (gimple_seq *seq, location_t loc,
6186 enum built_in_function fn, tree type, tree arg0, tree arg1)
6188 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6189 if (!res)
       /* No simplification possible; emit an explicit call.  */
6191 tree decl = builtin_decl_implicit (fn);
6192 gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
6193 if (!VOID_TYPE_P (type))
6195 if (gimple_in_ssa_p (cfun))
6196 res = make_ssa_name (type);
6197 else
6198 res = create_tmp_reg (type);
6199 gimple_call_set_lhs (stmt, res);
6201 gimple_set_location (stmt, loc);
6202 gimple_seq_add_stmt_without_update (seq, stmt);
6204 return res;
6207 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6208 (or no result if TYPE is void) with location LOC,
6209 simplifying it first if possible. Returns the built
6210 expression value (or NULL_TREE if TYPE is void) and appends
6211 statements possibly defining it to SEQ. */
6213 tree
6214 gimple_build (gimple_seq *seq, location_t loc,
6215 enum built_in_function fn, tree type,
6216 tree arg0, tree arg1, tree arg2)
6218 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6219 seq, gimple_build_valueize);
6220 if (!res)
       /* No simplification possible; emit an explicit call.  */
6222 tree decl = builtin_decl_implicit (fn);
6223 gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6224 if (!VOID_TYPE_P (type))
6226 if (gimple_in_ssa_p (cfun))
6227 res = make_ssa_name (type);
6228 else
6229 res = create_tmp_reg (type);
6230 gimple_call_set_lhs (stmt, res);
6232 gimple_set_location (stmt, loc);
6233 gimple_seq_add_stmt_without_update (seq, stmt);
6235 return res;
6238 /* Build the conversion (TYPE) OP with a result of type TYPE
6239 with location LOC if such conversion is necessary in GIMPLE,
6240 simplifying it first.
6241 Returns the built expression value and appends
6242 statements possibly defining it to SEQ. */
6244 tree
6245 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6247 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6248 return op;
6249 return gimple_build (seq, loc, NOP_EXPR, type, op);